import numpy as np
import pandas as pd
import seaborn as sns
from matplotlib import pyplot as plt
import warnings
warnings.filterwarnings("ignore")
sns.set()
from IPython.display import display
rdf = pd.read_csv("Rose.csv")
rdf.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 187 entries, 0 to 186 Data columns (total 2 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 YearMonth 187 non-null object 1 Rose 185 non-null float64 dtypes: float64(1), object(1) memory usage: 3.0+ KB
rdf.describe()
| Rose | |
|---|---|
| count | 185.000000 |
| mean | 90.394595 |
| std | 39.175344 |
| min | 28.000000 |
| 25% | 63.000000 |
| 50% | 86.000000 |
| 75% | 112.000000 |
| max | 267.000000 |
rdf.head()
| YearMonth | Rose | |
|---|---|---|
| 0 | 1980-01 | 112.0 |
| 1 | 1980-02 | 118.0 |
| 2 | 1980-03 | 129.0 |
| 3 | 1980-04 | 99.0 |
| 4 | 1980-05 | 116.0 |
rdf.tail()
| YearMonth | Rose | |
|---|---|---|
| 182 | 1995-03 | 45.0 |
| 183 | 1995-04 | 52.0 |
| 184 | 1995-05 | 28.0 |
| 185 | 1995-06 | 40.0 |
| 186 | 1995-07 | 62.0 |
rdf[rdf["Rose"].isnull()]
| YearMonth | Rose | |
|---|---|---|
| 174 | 1994-07 | NaN |
| 175 | 1994-08 | NaN |
rdf = rdf.interpolate()
rdf[rdf["Rose"].isnull()]
| YearMonth | Rose |
|---|
Date = pd.date_range(start="1980-01-01", periods=187, freq="M")
Date
DatetimeIndex(['1980-01-31', '1980-02-29', '1980-03-31', '1980-04-30',
'1980-05-31', '1980-06-30', '1980-07-31', '1980-08-31',
'1980-09-30', '1980-10-31',
...
'1994-10-31', '1994-11-30', '1994-12-31', '1995-01-31',
'1995-02-28', '1995-03-31', '1995-04-30', '1995-05-31',
'1995-06-30', '1995-07-31'],
dtype='datetime64[ns]', length=187, freq='M')
rdf["Date"] = Date
rdf.drop("YearMonth", axis=1, inplace=True)
rdf.set_index("Date", inplace=True)
rdf.head()
| Rose | |
|---|---|
| Date | |
| 1980-01-31 | 112.0 |
| 1980-02-29 | 118.0 |
| 1980-03-31 | 129.0 |
| 1980-04-30 | 99.0 |
| 1980-05-31 | 116.0 |
rdf.index.freq = "M"
from pylab import rcParams # or we can write plt.rcParams['figure.figsize'] = 15,8
rcParams['figure.figsize'] = 15,8
rdf.plot()
plt.show()
sns.boxplot(x = rdf.index.year, y = rdf["Rose"])
plt.xlabel('Year')
plt.show()
sns.boxplot(x = rdf.index.month_name(), y = rdf["Rose"])
plt.xlabel('Month')
plt.show()
from statsmodels.graphics.tsaplots import month_plot
month_plot(rdf, ylabel="Rose Wine Sales")
from statsmodels.tsa.seasonal import seasonal_decompose
decomposition = seasonal_decompose(rdf, model="multiplicative")
decomposition.plot()
trend = decomposition.trend
seasonality = decomposition.seasonal
residual = decomposition.resid
print("Trend", "\n", trend.head(12), "\n")
print("Seasonality", "\n", seasonality.head(12), "\n")
print("Residual", "\n", residual.head(12), "\n")
Trend Date 1980-01-31 NaN 1980-02-29 NaN 1980-03-31 NaN 1980-04-30 NaN 1980-05-31 NaN 1980-06-30 NaN 1980-07-31 147.083333 1980-08-31 148.125000 1980-09-30 148.375000 1980-10-31 148.083333 1980-11-30 147.416667 1980-12-31 145.125000 Freq: M, Name: trend, dtype: float64 Seasonality Date 1980-01-31 0.670111 1980-02-29 0.806163 1980-03-31 0.901164 1980-04-30 0.854024 1980-05-31 0.889415 1980-06-30 0.923985 1980-07-31 1.058038 1980-08-31 1.035881 1980-09-30 1.017648 1980-10-31 1.022573 1980-11-30 1.192349 1980-12-31 1.628646 Freq: M, Name: seasonal, dtype: float64 Residual Date 1980-01-31 NaN 1980-02-29 NaN 1980-03-31 NaN 1980-04-30 NaN 1980-05-31 NaN 1980-06-30 NaN 1980-07-31 0.758258 1980-08-31 0.840720 1980-09-30 1.357674 1980-10-31 0.970771 1980-11-30 0.853378 1980-12-31 1.129646 Freq: M, Name: resid, dtype: float64
train = rdf[rdf.index.year < 1991]
test = rdf[rdf.index.year >= 1991]
print(train)
print(test)
Rose
Date
1980-01-31 112.0
1980-02-29 118.0
1980-03-31 129.0
1980-04-30 99.0
1980-05-31 116.0
... ...
1990-08-31 70.0
1990-09-30 83.0
1990-10-31 65.0
1990-11-30 110.0
1990-12-31 132.0
[132 rows x 1 columns]
Rose
Date
1991-01-31 54.000000
1991-02-28 55.000000
1991-03-31 66.000000
1991-04-30 65.000000
1991-05-31 60.000000
1991-06-30 65.000000
1991-07-31 96.000000
1991-08-31 55.000000
1991-09-30 71.000000
1991-10-31 63.000000
1991-11-30 74.000000
1991-12-31 106.000000
1992-01-31 34.000000
1992-02-29 47.000000
1992-03-31 56.000000
1992-04-30 53.000000
1992-05-31 53.000000
1992-06-30 55.000000
1992-07-31 67.000000
1992-08-31 52.000000
1992-09-30 46.000000
1992-10-31 51.000000
1992-11-30 58.000000
1992-12-31 91.000000
1993-01-31 33.000000
1993-02-28 40.000000
1993-03-31 46.000000
1993-04-30 45.000000
1993-05-31 41.000000
1993-06-30 55.000000
1993-07-31 57.000000
1993-08-31 54.000000
1993-09-30 46.000000
1993-10-31 52.000000
1993-11-30 48.000000
1993-12-31 77.000000
1994-01-31 30.000000
1994-02-28 35.000000
1994-03-31 42.000000
1994-04-30 48.000000
1994-05-31 44.000000
1994-06-30 45.000000
1994-07-31 45.333333
1994-08-31 45.666667
1994-09-30 46.000000
1994-10-31 51.000000
1994-11-30 63.000000
1994-12-31 84.000000
1995-01-31 30.000000
1995-02-28 39.000000
1995-03-31 45.000000
1995-04-30 52.000000
1995-05-31 28.000000
1995-06-30 40.000000
1995-07-31 62.000000
train.shape, test.shape
((132, 1), (55, 1))
train["Rose"].plot(legend = True, label = "Train", fontsize = 14)
test["Rose"].plot(legend = True, label = "Test", fontsize = 14)
plt.show()
We are going to regress the "Rose" variable against the order of the occurrence. For this we need to modify our training data before fitting it into a linear regression.
train_time = [i + 1 for i in range(len(train))]
test_time = [i + 133 for i in range(len(test))]
print("Train Length:", len(train), "Test Length:", len(test))
Train Length: 132 Test Length: 55
LinearRegression_train = train.copy()
LinearRegression_test = test.copy()
LinearRegression_train['time'] = train_time
LinearRegression_test['time'] = test_time
display(LinearRegression_train.head())
display(LinearRegression_train.tail())
| Rose | time | |
|---|---|---|
| Date | ||
| 1980-01-31 | 112.0 | 1 |
| 1980-02-29 | 118.0 | 2 |
| 1980-03-31 | 129.0 | 3 |
| 1980-04-30 | 99.0 | 4 |
| 1980-05-31 | 116.0 | 5 |
| Rose | time | |
|---|---|---|
| Date | ||
| 1990-08-31 | 70.0 | 128 |
| 1990-09-30 | 83.0 | 129 |
| 1990-10-31 | 65.0 | 130 |
| 1990-11-30 | 110.0 | 131 |
| 1990-12-31 | 132.0 | 132 |
from sklearn.linear_model import LinearRegression
lr = LinearRegression()
lr.fit(LinearRegression_train[["time"]], LinearRegression_train["Rose"])
LinearRegression()
test_predictions_model1 = lr.predict(LinearRegression_test[['time']])
LinearRegression_test['RegOnTime'] = test_predictions_model1
LinearRegression_test.head(12)
| Rose | time | RegOnTime | |
|---|---|---|---|
| Date | |||
| 1991-01-31 | 54.0 | 133 | 72.063266 |
| 1991-02-28 | 55.0 | 134 | 71.568888 |
| 1991-03-31 | 66.0 | 135 | 71.074511 |
| 1991-04-30 | 65.0 | 136 | 70.580133 |
| 1991-05-31 | 60.0 | 137 | 70.085755 |
| 1991-06-30 | 65.0 | 138 | 69.591377 |
| 1991-07-31 | 96.0 | 139 | 69.096999 |
| 1991-08-31 | 55.0 | 140 | 68.602621 |
| 1991-09-30 | 71.0 | 141 | 68.108243 |
| 1991-10-31 | 63.0 | 142 | 67.613866 |
| 1991-11-30 | 74.0 | 143 | 67.119488 |
| 1991-12-31 | 106.0 | 144 | 66.625110 |
train["Rose"].plot(legend = True, label = 'Train')
test["Rose"].plot(legend = True, label = 'Test')
LinearRegression_test['RegOnTime'].plot(legend = True, label = 'Test Preds using Linear Regression')
plt.show()
from statsmodels.tools.eval_measures import rmse
rmse_model1_test = rmse(test["Rose"], test_predictions_model1)
rmse_model1_test
15.268955197146559
resultsDf = pd.DataFrame({"Test RMSE": [rmse_model1_test]}, index = ["RegressionOnTime"])
resultsDf
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
NaiveModel_train = train.copy()
NaiveModel_test = test.copy()
NaiveModel_test["naive"] = train["Rose"][len(train["Rose"])-1]
NaiveModel_test["naive"].head()
Date 1991-01-31 132.0 1991-02-28 132.0 1991-03-31 132.0 1991-04-30 132.0 1991-05-31 132.0 Freq: M, Name: naive, dtype: float64
train["Rose"].plot(legend = True, label = "Train")
test["Rose"].plot(legend = True, label = "Test")
NaiveModel_test["naive"].plot(legend = True, label = "Naive Model Test Preds")
plt.show()
rmse_model2_test = rmse(test["Rose"], NaiveModel_test["naive"])
print("RMSE for Naive Bayes",rmse_model2_test)
resultsDf_2 = pd.DataFrame({"Test RMSE": [rmse_model2_test]}, index=["NaiveModel"])
resultsDf = pd.concat([resultsDf, resultsDf_2])
display(resultsDf)
RMSE for Naive Bayes 79.71877337417347
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
SimpleAverage_train = train.copy()
SimpleAverage_test = test.copy()
SimpleAverage_test["mean_Rose"] = train["Rose"].mean()
SimpleAverage_test.head()
| Rose | mean_Rose | |
|---|---|---|
| Date | ||
| 1991-01-31 | 54.0 | 104.939394 |
| 1991-02-28 | 55.0 | 104.939394 |
| 1991-03-31 | 66.0 | 104.939394 |
| 1991-04-30 | 65.0 | 104.939394 |
| 1991-05-31 | 60.0 | 104.939394 |
train["Rose"].plot(legend = True, label = "Train")
test["Rose"].plot(legend = True, label = "Test")
SimpleAverage_test["mean_Rose"].plot(legend = True, label = "Simple Avg Test Predictions")
plt.show()
rmse_model3_test = rmse(test["Rose"], SimpleAverage_test["mean_Rose"])
print("RMSE for Simple Average Model is", rmse_model3_test)
RMSE for Simple Average Model is 53.460569646114436
reultsDf_3 = pd.DataFrame({"Test RMSE": [rmse_model3_test]}, index = ["SimpleAverageModel"])
resultsDf = pd.concat([resultsDf, reultsDf_3])
resultsDf
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
For the moving average model, we are going to calculate rolling means (or moving averages) for different intervals. The best interval can be determined by the maximum accuracy (or the minimum error) over here.
For Moving Average, we are going to average over the entire data.
MovingAverage = rdf.copy()
MovingAverage.head()
| Rose | |
|---|---|
| Date | |
| 1980-01-31 | 112.0 |
| 1980-02-29 | 118.0 |
| 1980-03-31 | 129.0 |
| 1980-04-30 | 99.0 |
| 1980-05-31 | 116.0 |
MovingAverage["Trailing_2"] = MovingAverage["Rose"].rolling(2).mean()
MovingAverage["Trailing_4"] = MovingAverage["Rose"].rolling(4).mean()
MovingAverage["Trailing_6"] = MovingAverage["Rose"].rolling(6).mean()
MovingAverage["Trailing_9"] = MovingAverage["Rose"].rolling(9).mean()
MovingAverage.head()
| Rose | Trailing_2 | Trailing_4 | Trailing_6 | Trailing_9 | |
|---|---|---|---|---|---|
| Date | |||||
| 1980-01-31 | 112.0 | NaN | NaN | NaN | NaN |
| 1980-02-29 | 118.0 | 115.0 | NaN | NaN | NaN |
| 1980-03-31 | 129.0 | 123.5 | NaN | NaN | NaN |
| 1980-04-30 | 99.0 | 114.0 | 114.5 | NaN | NaN |
| 1980-05-31 | 116.0 | 107.5 | 115.5 | NaN | NaN |
plt.figure(figsize=(16,8))
plt.plot(MovingAverage["Rose"], label="Train")
plt.plot(MovingAverage["Trailing_2"], label="2 point Moving Average")
plt.plot(MovingAverage["Trailing_4"], label="4 point Moving Average")
plt.plot(MovingAverage["Trailing_6"], label="6 point Moving Average")
plt.plot(MovingAverage["Trailing_9"], label="9 point Moving Average")
plt.legend(loc="best")
plt.grid()
plt.show()
trailing_MovingAverage_train = MovingAverage[MovingAverage.index.year < 1991]
trailing_MovingAverage_test = MovingAverage[MovingAverage.index.year >= 1991]
plt.figure(figsize=(16,8))
plt.plot(trailing_MovingAverage_train["Rose"], label="Train")
plt.plot(trailing_MovingAverage_test["Rose"], label="Test")
plt.plot(trailing_MovingAverage_train["Trailing_2"], label="2 point Trailing Moving Average on Training Set")
plt.plot(trailing_MovingAverage_train["Trailing_4"], label="4 point Trailing Moving Average on Training Set")
plt.plot(trailing_MovingAverage_train["Trailing_6"], label="6 point Trailing Moving Average on Training Set")
plt.plot(trailing_MovingAverage_train["Trailing_9"], label="9 point Trailing Moving Average on Training Set")
plt.plot(trailing_MovingAverage_test["Trailing_2"], label="2 point Trailing Moving Average on Test Set")
plt.plot(trailing_MovingAverage_test["Trailing_4"], label="4 point Trailing Moving Average on Test Set")
plt.plot(trailing_MovingAverage_test["Trailing_6"], label="6 point Trailing Moving Average on Test Set")
plt.plot(trailing_MovingAverage_test["Trailing_9"], label="9 point Trailing Moving Average on Test Set")
plt.legend(loc="best")
plt.grid()
plt.show()
rmse_model4_test_2 = rmse(test["Rose"], trailing_MovingAverage_test["Trailing_2"])
print("Rmse for trailing_2", rmse_model4_test_2)
rmse_model4_test_4 = rmse(test["Rose"], trailing_MovingAverage_test["Trailing_4"])
print("Rmse for trailing_4", rmse_model4_test_4)
rmse_model4_test_6 = rmse(test["Rose"], trailing_MovingAverage_test["Trailing_6"])
print("Rmse for trailing_6", rmse_model4_test_6)
rmse_model4_test_9 = rmse(test["Rose"], trailing_MovingAverage_test["Trailing_9"])
print("Rmse for trailing_9", rmse_model4_test_9)
resultsDf_4 = pd.DataFrame({"Test RMSE": [rmse_model4_test_2, rmse_model4_test_4, rmse_model4_test_6, rmse_model4_test_9]},
index = ["2_point_trailing_Moving_Average", "4_point_trailing_Moving_Average", "6_point_trailing_Moving_Average", "9_point_trailing_Moving_Average"])
resultsDf = pd.concat([resultsDf, resultsDf_4])
resultsDf
Rmse for trailing_2 11.52927761935857 Rmse for trailing_4 14.451403238286431 Rmse for trailing_6 14.566327265691354 Rmse for trailing_9 14.727629822363193
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
train["Rose"].plot(legend=True, label="Train")
test["Rose"].plot(legend=True, label="Test")
LinearRegression_test["RegOnTime"].plot(legend=True, label="Test Preds using Linear Regression")
NaiveModel_test["naive"].plot(legend=True, label="Naive Model Test Preds")
SimpleAverage_test["mean_Rose"].plot(legend=True, label="Simple Avg Test Preds")
trailing_MovingAverage_test["Trailing_2"].plot(legend=True, label="Trailing MA 2 test preds")
plt.show()
from statsmodels.tsa.api import ExponentialSmoothing, SimpleExpSmoothing, Holt
SES_train = train.copy()
SES_test = test.copy()
model_SES = SimpleExpSmoothing(SES_train["Rose"]);
model_SES_autofit = model_SES.fit()
display("SES Params", model_SES_autofit.params)
SES_test["predict"] = model_SES_autofit.forecast(steps=len(test))
display(SES_test.head().style)
SES_train["Rose"].plot(legend=True, label="Train")
SES_test["Rose"].plot(legend=True, label="Test")
SES_test["predict"].plot(legend=True, label="SES Preds on Test")
plt.show()
'SES Params'
{'smoothing_level': 0.09874989825614361,
'smoothing_trend': nan,
'smoothing_seasonal': nan,
'damping_trend': nan,
'initial_level': 134.38702255613862,
'initial_trend': nan,
'initial_seasons': array([], dtype=float64),
'use_boxcox': False,
'lamda': None,
'remove_bias': False}
| Rose | predict | |
|---|---|---|
| Date | ||
| 1991-01-31 00:00:00 | 54.000000 | 87.104999 |
| 1991-02-28 00:00:00 | 55.000000 | 87.104999 |
| 1991-03-31 00:00:00 | 66.000000 | 87.104999 |
| 1991-04-30 00:00:00 | 65.000000 | 87.104999 |
| 1991-05-31 00:00:00 | 60.000000 | 87.104999 |
rmse_model5_test_1 = rmse(SES_test["Rose"], SES_test["predict"])
display(rmse_model5_test_1)
resultsDf_5 = pd.DataFrame({"Test RMSE": [+rmse_model5_test_1]}, index=["Alpha=0.102,SimpleExponentialSmoothing"])
resultsDf = pd.concat([resultsDf, resultsDf_5])
display(resultsDf)
36.79624208189247
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
resultsDf_6 = pd.DataFrame({"Alpha Values":[], "Train RMSE": [], "Test RMSE": []})
alpha_list = [0.3,0.4,0.5,0.6,0.7,0.8,0.9]
for i in alpha_list:
model_SES_alpha_i = model_SES.fit(smoothing_level=i)
SES_train["predict",i] = model_SES_alpha_i.fittedvalues
SES_test["predict",i] = model_SES_alpha_i.forecast(steps=len(test))
rmse_model5_train_i = rmse(SES_train["Rose"],SES_train["predict",i])
rmse_model5_test_i = rmse(SES_test["Rose"],SES_test["predict",i])
resultsDf_6 = resultsDf_6.append({"Alpha Values":i,
"Train RMSE": rmse_model5_train_i,"Test RMSE":rmse_model5_test_i},
ignore_index=True)
display(SES_test.head().style)
display("Model Evaluation", resultsDf_6.sort_values(by=["Test RMSE"],ascending=True))
| Rose | predict | ('predict', 0.3) | ('predict', 0.4) | ('predict', 0.5) | ('predict', 0.6) | ('predict', 0.7) | ('predict', 0.8) | ('predict', 0.9) | |
|---|---|---|---|---|---|---|---|---|---|
| Date | |||||||||
| 1991-01-31 00:00:00 | 54.000000 | 87.104999 | 98.669021 | 105.260413 | 111.376470 | 116.885470 | 121.747572 | 125.925560 | 129.366777 |
| 1991-02-28 00:00:00 | 55.000000 | 87.104999 | 98.669021 | 105.260413 | 111.376470 | 116.885470 | 121.747572 | 125.925560 | 129.366777 |
| 1991-03-31 00:00:00 | 66.000000 | 87.104999 | 98.669021 | 105.260413 | 111.376470 | 116.885470 | 121.747572 | 125.925560 | 129.366777 |
| 1991-04-30 00:00:00 | 65.000000 | 87.104999 | 98.669021 | 105.260413 | 111.376470 | 116.885470 | 121.747572 | 125.925560 | 129.366777 |
| 1991-05-31 00:00:00 | 60.000000 | 87.104999 | 98.669021 | 105.260413 | 111.376470 | 116.885470 | 121.747572 | 125.925560 | 129.366777 |
'Model Evaluation'
| Alpha Values | Train RMSE | Test RMSE | |
|---|---|---|---|
| 0 | 0.3 | 32.448833 | 47.504821 |
| 1 | 0.4 | 33.028400 | 53.767406 |
| 2 | 0.5 | 33.680192 | 59.641786 |
| 3 | 0.6 | 34.439946 | 64.971288 |
| 4 | 0.7 | 35.322683 | 69.698162 |
| 5 | 0.8 | 36.334371 | 73.773992 |
| 6 | 0.9 | 37.482735 | 77.139276 |
plt.figure(figsize=(18,9))
plt.plot(SES_train["Rose"], label="Train")
plt.plot(SES_test["Rose"], label="Test")
plt.plot(SES_test["predict"], label="Alpha=1 Simple Exponential Smoothing predictions on Test Set")
plt.plot(SES_test["predict", 0.9], label="Alpha=0.9 Simple Exponential Smoothing predictions on Test Set")
plt.legend(loc="best")
plt.grid()
plt.show()
resultsDf_6_1 = pd.DataFrame({"Test RMSE": [resultsDf_6.sort_values(by=["Test RMSE"], ascending=True).values[0][2]]},
index = ["Alpha=0.9,SimpleExponentialSmoothing"])
resultsDf = pd.concat([resultsDf, resultsDf_6_1])
display(resultsDf.style)
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
DES_train = train.copy()
DES_test = test.copy()
model_DES = Holt(DES_train["Rose"])
resultsDf_7 = pd.DataFrame({"Alpha Values": [], "Beta Values": [], "Train RMSE": [], "Test RMSE": []})
alpha_list = [0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
beta_list = [0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
for i in alpha_list:
for j in beta_list:
model_DES_alpha_i_j = model_DES.fit(smoothing_level=i,smoothing_trend=j)
DES_train["predict",i,j] = model_DES_alpha_i_j.fittedvalues
DES_test["predict",i,j] = model_DES_alpha_i_j.forecast(steps=len(test))
rmse_model6_train = rmse(DES_train["Rose"],DES_train["predict",i,j])
rmse_model6_test = rmse(DES_test["Rose"],DES_test["predict",i,j])
resultsDf_7 = resultsDf_7.append({"Alpha Values":i,
"Beta Values":j,
"Train RMSE": rmse_model6_train,
"Test RMSE": rmse_model6_test}, ignore_index=True)
display(resultsDf_7.sort_values(by=["Test RMSE"]).head(10))
| Alpha Values | Beta Values | Train RMSE | Test RMSE | |
|---|---|---|---|---|
| 0 | 0.3 | 0.3 | 35.928003 | 265.567594 |
| 8 | 0.4 | 0.3 | 36.733732 | 339.306534 |
| 1 | 0.3 | 0.4 | 37.356026 | 358.750942 |
| 16 | 0.5 | 0.3 | 37.424080 | 394.272629 |
| 24 | 0.6 | 0.3 | 38.343309 | 439.296033 |
| 9 | 0.4 | 0.4 | 37.982228 | 441.965558 |
| 2 | 0.3 | 0.5 | 38.828384 | 451.810230 |
| 32 | 0.7 | 0.3 | 39.554250 | 477.060557 |
| 17 | 0.5 | 0.4 | 38.557286 | 500.437734 |
| 40 | 0.8 | 0.3 | 41.034375 | 506.353933 |
plt.figure(figsize=(18,9))
plt.plot(DES_train["Rose"], label="Train")
plt.plot(DES_test["Rose"], label="Test")
plt.plot(DES_test["predict", 0.3, 0.4], label="Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing predictions on Test Set")
plt.legend(loc="best")
plt.grid()
plt.show()
resultsDf_7_1 = pd.DataFrame({"Test RMSE": [resultsDf_7.sort_values(by=["Test RMSE"]).values[0][3]]},
index=["Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing"])
resultsDf = pd.concat([resultsDf, resultsDf_7_1])
display(resultsDf)
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
TES_train = train.copy()
TES_test = test.copy()
model_TES = ExponentialSmoothing(TES_train["Rose"], trend="additive", seasonal="multiplicative", freq="M")
model_TES_autofit = model_TES.fit()
display(model_TES_autofit.params)
TES_test["auto_predict"] = model_TES_autofit.forecast(steps=len(test)).round(0)
display(TES_test.head())
plt.figure(figsize=(18,9))
plt.plot(TES_train["Rose"], label="Train")
plt.plot(TES_test["Rose"], label="Test")
plt.plot(TES_test["auto_predict"], label="Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing")
plt.legend(loc="best")
plt.title("Plot with Autofit")
plt.grid()
plt.show()
{'smoothing_level': 0.06467234615091698,
'smoothing_trend': 0.05315920636255018,
'smoothing_seasonal': 0.0,
'damping_trend': nan,
'initial_level': 50.880912909225756,
'initial_trend': -0.31656840824205823,
'initial_seasons': array([2.21583703, 2.51439498, 2.74693025, 2.40118428, 2.69936273,
2.94338111, 3.2353888 , 3.44052906, 3.26420741, 3.19365239,
3.72269442, 5.13435788]),
'use_boxcox': False,
'lamda': None,
'remove_bias': False}
| Rose | auto_predict | |
|---|---|---|
| Date | ||
| 1991-01-31 | 54.0 | 57.0 |
| 1991-02-28 | 55.0 | 64.0 |
| 1991-03-31 | 66.0 | 70.0 |
| 1991-04-30 | 65.0 | 61.0 |
| 1991-05-31 | 60.0 | 68.0 |
rmse_model6_test_1 = rmse(TES_test["Rose"], TES_test["auto_predict"])
display(rmse_model6_test_1)
resultsDf_8_1 = pd.DataFrame({"Test RMSE": [rmse_model6_test_1]}, index=["Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing"])
resultsDf = pd.concat([resultsDf, resultsDf_8_1])
display(resultsDf)
21.224437982589023
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
import warnings
warnings.filterwarnings('ignore')
resultsDf_8_2 = pd.DataFrame({"Alpha Values":[],"Beta Values":[],"Gamma Values":[],"Train RMSE":[],"Test RMSE": []})
gamma_list = [0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
for i in alpha_list:
for j in beta_list:
for k in gamma_list:
model_TES_alpha_i_j_k = model_TES.fit(smoothing_level=i,
smoothing_trend=j,
smoothing_seasonal=k)
TES_train["predict",i,j,k] = model_TES_alpha_i_j_k.fittedvalues
TES_test["predict",i,j,k] = model_TES_alpha_i_j_k.forecast(steps=len(test))
rmse_model8_train = rmse(TES_train["Rose"],
TES_train["predict",i,j,k])
rmse_model8_test = rmse(TES_test["Rose"],
TES_test["predict",i,j,k])
resultsDf_8_2 = resultsDf_8_2.append({"Alpha Values":i,
"Beta Values":j,
"Gamma Values":k,
"Train RMSE":rmse_model8_train,
"Test RMSE":rmse_model8_test},ignore_index=True)
display(TES_test.head().style)
display(resultsDf_8_2.sort_values(by=["Test RMSE"]).head())
resultsDf_8_3 = pd.DataFrame({"Test RMSE": [resultsDf_8_2.sort_values(by=["Test RMSE"]).values[0][4]]}
,index=["Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing"])
resultsDf = pd.concat([resultsDf, resultsDf_8_3])
display(resultsDf.sort_values(by=["Test RMSE"]))
| Rose | auto_predict | ('predict', 0.3, 0.3, 0.3) | ('predict', 0.3, 0.3, 0.4) | ('predict', 0.3, 0.3, 0.5) | ('predict', 0.3, 0.3, 0.6) | ('predict', 0.3, 0.3, 0.7) | ('predict', 0.3, 0.3, 0.8) | ('predict', 0.3, 0.3, 0.9) | ('predict', 0.3, 0.3, 1.0) | ('predict', 0.3, 0.4, 0.3) | ('predict', 0.3, 0.4, 0.4) | ('predict', 0.3, 0.4, 0.5) | ('predict', 0.3, 0.4, 0.6) | ('predict', 0.3, 0.4, 0.7) | ('predict', 0.3, 0.4, 0.8) | ('predict', 0.3, 0.4, 0.9) | ('predict', 0.3, 0.4, 1.0) | ('predict', 0.3, 0.5, 0.3) | ('predict', 0.3, 0.5, 0.4) | ('predict', 0.3, 0.5, 0.5) | ('predict', 0.3, 0.5, 0.6) | ('predict', 0.3, 0.5, 0.7) | ('predict', 0.3, 0.5, 0.8) | ('predict', 0.3, 0.5, 0.9) | ('predict', 0.3, 0.5, 1.0) | ('predict', 0.3, 0.6, 0.3) | ('predict', 0.3, 0.6, 0.4) | ('predict', 0.3, 0.6, 0.5) | ('predict', 0.3, 0.6, 0.6) | ('predict', 0.3, 0.6, 0.7) | ('predict', 0.3, 0.6, 0.8) | ('predict', 0.3, 0.6, 0.9) | ('predict', 0.3, 0.6, 1.0) | ('predict', 0.3, 0.7, 0.3) | ('predict', 0.3, 0.7, 0.4) | ('predict', 0.3, 0.7, 0.5) | ('predict', 0.3, 0.7, 0.6) | ('predict', 0.3, 0.7, 0.7) | ('predict', 0.3, 0.7, 0.8) | ('predict', 0.3, 0.7, 0.9) | ('predict', 0.3, 0.7, 1.0) | ('predict', 0.3, 0.8, 0.3) | ('predict', 0.3, 0.8, 0.4) | ('predict', 0.3, 0.8, 0.5) | ('predict', 0.3, 0.8, 0.6) | ('predict', 0.3, 0.8, 0.7) | ('predict', 0.3, 0.8, 0.8) | ('predict', 0.3, 0.8, 0.9) | ('predict', 0.3, 0.8, 1.0) | ('predict', 0.3, 0.9, 0.3) | ('predict', 0.3, 0.9, 0.4) | ('predict', 0.3, 0.9, 0.5) | ('predict', 0.3, 0.9, 0.6) | ('predict', 0.3, 0.9, 0.7) | ('predict', 0.3, 0.9, 0.8) | ('predict', 0.3, 0.9, 0.9) | ('predict', 0.3, 0.9, 1.0) | ('predict', 0.3, 1.0, 0.3) | ('predict', 0.3, 1.0, 0.4) | ('predict', 0.3, 1.0, 0.5) | ('predict', 0.3, 1.0, 0.6) | ('predict', 0.3, 1.0, 0.7) | ('predict', 0.3, 1.0, 0.8) | ('predict', 0.3, 1.0, 0.9) | ('predict', 0.3, 1.0, 1.0) | ('predict', 0.4, 0.3, 0.3) | ('predict', 0.4, 0.3, 0.4) | ('predict', 0.4, 0.3, 0.5) | ('predict', 0.4, 0.3, 0.6) | ('predict', 0.4, 0.3, 0.7) | ('predict', 0.4, 0.3, 0.8) | ('predict', 0.4, 0.3, 0.9) | ('predict', 0.4, 0.3, 1.0) | ('predict', 0.4, 0.4, 0.3) | ('predict', 0.4, 0.4, 0.4) | ('predict', 0.4, 0.4, 0.5) | ('predict', 0.4, 0.4, 0.6) | ('predict', 0.4, 0.4, 0.7) | ('predict', 0.4, 0.4, 0.8) | ('predict', 0.4, 0.4, 0.9) | ('predict', 0.4, 0.4, 1.0) | ('predict', 0.4, 0.5, 0.3) | ('predict', 0.4, 0.5, 0.4) | ('predict', 0.4, 0.5, 0.5) | ('predict', 0.4, 0.5, 0.6) | ('predict', 0.4, 0.5, 0.7) | ('predict', 0.4, 0.5, 0.8) | ('predict', 0.4, 0.5, 0.9) | ('predict', 0.4, 0.5, 1.0) | ('predict', 0.4, 0.6, 0.3) | ('predict', 0.4, 0.6, 0.4) | ('predict', 0.4, 0.6, 0.5) | ('predict', 0.4, 0.6, 0.6) | ('predict', 0.4, 0.6, 0.7) | ('predict', 0.4, 0.6, 0.8) | ('predict', 0.4, 0.6, 0.9) | ('predict', 0.4, 0.6, 1.0) | ('predict', 0.4, 0.7, 0.3) | ('predict', 0.4, 0.7, 0.4) | ('predict', 0.4, 0.7, 0.5) | ('predict', 0.4, 0.7, 0.6) | ('predict', 0.4, 0.7, 0.7) | ('predict', 0.4, 0.7, 0.8) | ('predict', 0.4, 0.7, 0.9) | ('predict', 0.4, 0.7, 1.0) | ('predict', 0.4, 0.8, 0.3) | ('predict', 0.4, 0.8, 0.4) | ('predict', 0.4, 0.8, 0.5) | ('predict', 0.4, 0.8, 0.6) | ('predict', 0.4, 0.8, 0.7) | ('predict', 0.4, 0.8, 0.8) | ('predict', 0.4, 0.8, 0.9) | ('predict', 0.4, 0.8, 1.0) | ('predict', 0.4, 0.9, 0.3) | ('predict', 0.4, 0.9, 0.4) | ('predict', 0.4, 0.9, 0.5) | ('predict', 0.4, 0.9, 0.6) | ('predict', 0.4, 0.9, 0.7) | ('predict', 0.4, 0.9, 0.8) | ('predict', 0.4, 0.9, 0.9) | ('predict', 0.4, 0.9, 1.0) | ('predict', 0.4, 1.0, 0.3) | ('predict', 0.4, 1.0, 0.4) | ('predict', 0.4, 1.0, 0.5) | ('predict', 0.4, 1.0, 0.6) | ('predict', 0.4, 1.0, 0.7) | ('predict', 0.4, 1.0, 0.8) | ('predict', 0.4, 1.0, 0.9) | ('predict', 0.4, 1.0, 1.0) | ('predict', 0.5, 0.3, 0.3) | ('predict', 0.5, 0.3, 0.4) | ('predict', 0.5, 0.3, 0.5) | ('predict', 0.5, 0.3, 0.6) | ('predict', 0.5, 0.3, 0.7) | ('predict', 0.5, 0.3, 0.8) | ('predict', 0.5, 0.3, 0.9) | ('predict', 0.5, 0.3, 1.0) | ('predict', 0.5, 0.4, 0.3) | ('predict', 0.5, 0.4, 0.4) | ('predict', 0.5, 0.4, 0.5) | ('predict', 0.5, 0.4, 0.6) | ('predict', 0.5, 0.4, 0.7) | ('predict', 0.5, 0.4, 0.8) | ('predict', 0.5, 0.4, 0.9) | ('predict', 0.5, 0.4, 1.0) | ('predict', 0.5, 0.5, 0.3) | ('predict', 0.5, 0.5, 0.4) | ('predict', 0.5, 0.5, 0.5) | ('predict', 0.5, 0.5, 0.6) | ('predict', 0.5, 0.5, 0.7) | ('predict', 0.5, 0.5, 0.8) | ('predict', 0.5, 0.5, 0.9) | ('predict', 0.5, 0.5, 1.0) | ('predict', 0.5, 0.6, 0.3) | ('predict', 0.5, 0.6, 0.4) | ('predict', 0.5, 0.6, 0.5) | ('predict', 0.5, 0.6, 0.6) | ('predict', 0.5, 0.6, 0.7) | ('predict', 0.5, 0.6, 0.8) | ('predict', 0.5, 0.6, 0.9) | ('predict', 0.5, 0.6, 1.0) | ('predict', 0.5, 0.7, 0.3) | ('predict', 0.5, 0.7, 0.4) | ('predict', 0.5, 0.7, 0.5) | ('predict', 0.5, 0.7, 0.6) | ('predict', 0.5, 0.7, 0.7) | ('predict', 0.5, 0.7, 0.8) | ('predict', 0.5, 0.7, 0.9) | ('predict', 0.5, 0.7, 1.0) | ('predict', 0.5, 0.8, 0.3) | ('predict', 0.5, 0.8, 0.4) | ('predict', 0.5, 0.8, 0.5) | ('predict', 0.5, 0.8, 0.6) | ('predict', 0.5, 0.8, 0.7) | ('predict', 0.5, 0.8, 0.8) | ('predict', 0.5, 0.8, 0.9) | ('predict', 0.5, 0.8, 1.0) | ('predict', 0.5, 0.9, 0.3) | ('predict', 0.5, 0.9, 0.4) | ('predict', 0.5, 0.9, 0.5) | ('predict', 0.5, 0.9, 0.6) | ('predict', 0.5, 0.9, 0.7) | ('predict', 0.5, 0.9, 0.8) | ('predict', 0.5, 0.9, 0.9) | ('predict', 0.5, 0.9, 1.0) | ('predict', 0.5, 1.0, 0.3) | ('predict', 0.5, 1.0, 0.4) | ('predict', 0.5, 1.0, 0.5) | ('predict', 0.5, 1.0, 0.6) | ('predict', 0.5, 1.0, 0.7) | ('predict', 0.5, 1.0, 0.8) | ('predict', 0.5, 1.0, 0.9) | ('predict', 0.5, 1.0, 1.0) | ('predict', 0.6, 0.3, 0.3) | ('predict', 0.6, 0.3, 0.4) | ('predict', 0.6, 0.3, 0.5) | ('predict', 0.6, 0.3, 0.6) | ('predict', 0.6, 0.3, 0.7) | ('predict', 0.6, 0.3, 0.8) | ('predict', 0.6, 0.3, 0.9) | ('predict', 0.6, 0.3, 1.0) | ('predict', 0.6, 0.4, 0.3) | ('predict', 0.6, 0.4, 0.4) | ('predict', 0.6, 0.4, 0.5) | ('predict', 0.6, 0.4, 0.6) | ('predict', 0.6, 0.4, 0.7) | ('predict', 0.6, 0.4, 0.8) | ('predict', 0.6, 0.4, 0.9) | ('predict', 0.6, 0.4, 1.0) | ('predict', 0.6, 0.5, 0.3) | ('predict', 0.6, 0.5, 0.4) | ('predict', 0.6, 0.5, 0.5) | ('predict', 0.6, 0.5, 0.6) | ('predict', 0.6, 0.5, 0.7) | ('predict', 0.6, 0.5, 0.8) | ('predict', 0.6, 0.5, 0.9) | ('predict', 0.6, 0.5, 1.0) | ('predict', 0.6, 0.6, 0.3) | ('predict', 0.6, 0.6, 0.4) | ('predict', 0.6, 0.6, 0.5) | ('predict', 0.6, 0.6, 0.6) | ('predict', 0.6, 0.6, 0.7) | ('predict', 0.6, 0.6, 0.8) | ('predict', 0.6, 0.6, 0.9) | ('predict', 0.6, 0.6, 1.0) | ('predict', 0.6, 0.7, 0.3) | ('predict', 0.6, 0.7, 0.4) | ('predict', 0.6, 0.7, 0.5) | ('predict', 0.6, 0.7, 0.6) | ('predict', 0.6, 0.7, 0.7) | ('predict', 0.6, 0.7, 0.8) | ('predict', 0.6, 0.7, 0.9) | ('predict', 0.6, 0.7, 1.0) | ('predict', 0.6, 0.8, 0.3) | ('predict', 0.6, 0.8, 0.4) | ('predict', 0.6, 0.8, 0.5) | ('predict', 0.6, 0.8, 0.6) | ('predict', 0.6, 0.8, 0.7) | ('predict', 0.6, 0.8, 0.8) | ('predict', 0.6, 0.8, 0.9) | ('predict', 0.6, 0.8, 1.0) | ('predict', 0.6, 0.9, 0.3) | ('predict', 0.6, 0.9, 0.4) | ('predict', 0.6, 0.9, 0.5) | ('predict', 0.6, 0.9, 0.6) | ('predict', 0.6, 0.9, 0.7) | ('predict', 0.6, 0.9, 0.8) | ('predict', 0.6, 0.9, 0.9) | ('predict', 0.6, 0.9, 1.0) | ('predict', 0.6, 1.0, 0.3) | ('predict', 0.6, 1.0, 0.4) | ('predict', 0.6, 1.0, 0.5) | ('predict', 0.6, 1.0, 0.6) | ('predict', 0.6, 1.0, 0.7) | ('predict', 0.6, 1.0, 0.8) | ('predict', 0.6, 1.0, 0.9) | ('predict', 0.6, 1.0, 1.0) | ('predict', 0.7, 0.3, 0.3) | ('predict', 0.7, 0.3, 0.4) | ('predict', 0.7, 0.3, 0.5) | ('predict', 0.7, 0.3, 0.6) | ('predict', 0.7, 0.3, 0.7) | ('predict', 0.7, 0.3, 0.8) | ('predict', 0.7, 0.3, 0.9) | ('predict', 0.7, 0.3, 1.0) | ('predict', 0.7, 0.4, 0.3) | ('predict', 0.7, 0.4, 0.4) | ('predict', 0.7, 0.4, 0.5) | ('predict', 0.7, 0.4, 0.6) | ('predict', 0.7, 0.4, 0.7) | ('predict', 0.7, 0.4, 0.8) | ('predict', 0.7, 0.4, 0.9) | ('predict', 0.7, 0.4, 1.0) | ('predict', 0.7, 0.5, 0.3) | ('predict', 0.7, 0.5, 0.4) | ('predict', 0.7, 0.5, 0.5) | ('predict', 0.7, 0.5, 0.6) | ('predict', 0.7, 0.5, 0.7) | ('predict', 0.7, 0.5, 0.8) | ('predict', 0.7, 0.5, 0.9) | ('predict', 0.7, 0.5, 1.0) | ('predict', 0.7, 0.6, 0.3) | ('predict', 0.7, 0.6, 0.4) | ('predict', 0.7, 0.6, 0.5) | ('predict', 0.7, 0.6, 0.6) | ('predict', 0.7, 0.6, 0.7) | ('predict', 0.7, 0.6, 0.8) | ('predict', 0.7, 0.6, 0.9) | ('predict', 0.7, 0.6, 1.0) | ('predict', 0.7, 0.7, 0.3) | ('predict', 0.7, 0.7, 0.4) | ('predict', 0.7, 0.7, 0.5) | ('predict', 0.7, 0.7, 0.6) | ('predict', 0.7, 0.7, 0.7) | ('predict', 0.7, 0.7, 0.8) | ('predict', 0.7, 0.7, 0.9) | ('predict', 0.7, 0.7, 1.0) | ('predict', 0.7, 0.8, 0.3) | ('predict', 0.7, 0.8, 0.4) | ('predict', 0.7, 0.8, 0.5) | ('predict', 0.7, 0.8, 0.6) | ('predict', 0.7, 0.8, 0.7) | ('predict', 0.7, 0.8, 0.8) | ('predict', 0.7, 0.8, 0.9) | ('predict', 0.7, 0.8, 1.0) | ('predict', 0.7, 0.9, 0.3) | ('predict', 0.7, 0.9, 0.4) | ('predict', 0.7, 0.9, 0.5) | ('predict', 0.7, 0.9, 0.6) | ('predict', 0.7, 0.9, 0.7) | ('predict', 0.7, 0.9, 0.8) | ('predict', 0.7, 0.9, 0.9) | ('predict', 0.7, 0.9, 1.0) | ('predict', 0.7, 1.0, 0.3) | ('predict', 0.7, 1.0, 0.4) | ('predict', 0.7, 1.0, 0.5) | ('predict', 0.7, 1.0, 0.6) | ('predict', 0.7, 1.0, 0.7) | ('predict', 0.7, 1.0, 0.8) | ('predict', 0.7, 1.0, 0.9) | ('predict', 0.7, 1.0, 1.0) | ('predict', 0.8, 0.3, 0.3) | ('predict', 0.8, 0.3, 0.4) | ('predict', 0.8, 0.3, 0.5) | ('predict', 0.8, 0.3, 0.6) | ('predict', 0.8, 0.3, 0.7) | ('predict', 0.8, 0.3, 0.8) | ('predict', 0.8, 0.3, 0.9) | ('predict', 0.8, 0.3, 1.0) | ('predict', 0.8, 0.4, 0.3) | ('predict', 0.8, 0.4, 0.4) | ('predict', 0.8, 0.4, 0.5) | ('predict', 0.8, 0.4, 0.6) | ('predict', 0.8, 0.4, 0.7) | ('predict', 0.8, 0.4, 0.8) | ('predict', 0.8, 0.4, 0.9) | ('predict', 0.8, 0.4, 1.0) | ('predict', 0.8, 0.5, 0.3) | ('predict', 0.8, 0.5, 0.4) | ('predict', 0.8, 0.5, 0.5) | ('predict', 0.8, 0.5, 0.6) | ('predict', 0.8, 0.5, 0.7) | ('predict', 0.8, 0.5, 0.8) | ('predict', 0.8, 0.5, 0.9) | ('predict', 0.8, 0.5, 1.0) | ('predict', 0.8, 0.6, 0.3) | ('predict', 0.8, 0.6, 0.4) | ('predict', 0.8, 0.6, 0.5) | ('predict', 0.8, 0.6, 0.6) | ('predict', 0.8, 0.6, 0.7) | ('predict', 0.8, 0.6, 0.8) | ('predict', 0.8, 0.6, 0.9) | ('predict', 0.8, 0.6, 1.0) | ('predict', 0.8, 0.7, 0.3) | ('predict', 0.8, 0.7, 0.4) | ('predict', 0.8, 0.7, 0.5) | ('predict', 0.8, 0.7, 0.6) | ('predict', 0.8, 0.7, 0.7) | ('predict', 0.8, 0.7, 0.8) | ('predict', 0.8, 0.7, 0.9) | ('predict', 0.8, 0.7, 1.0) | ('predict', 0.8, 0.8, 0.3) | ('predict', 0.8, 0.8, 0.4) | ('predict', 0.8, 0.8, 0.5) | ('predict', 0.8, 0.8, 0.6) | ('predict', 0.8, 0.8, 0.7) | ('predict', 0.8, 0.8, 0.8) | ('predict', 0.8, 0.8, 0.9) | ('predict', 0.8, 0.8, 1.0) | ('predict', 0.8, 0.9, 0.3) | ('predict', 0.8, 0.9, 0.4) | ('predict', 0.8, 0.9, 0.5) | ('predict', 0.8, 0.9, 0.6) | ('predict', 0.8, 0.9, 0.7) | ('predict', 0.8, 0.9, 0.8) | ('predict', 0.8, 0.9, 0.9) | ('predict', 0.8, 0.9, 1.0) | ('predict', 0.8, 1.0, 0.3) | ('predict', 0.8, 1.0, 0.4) | ('predict', 0.8, 1.0, 0.5) | ('predict', 0.8, 1.0, 0.6) | ('predict', 0.8, 1.0, 0.7) | ('predict', 0.8, 1.0, 0.8) | ('predict', 0.8, 1.0, 0.9) | ('predict', 0.8, 1.0, 1.0) | ('predict', 0.9, 0.3, 0.3) | ('predict', 0.9, 0.3, 0.4) | ('predict', 0.9, 0.3, 0.5) | ('predict', 0.9, 0.3, 0.6) | ('predict', 0.9, 0.3, 0.7) | ('predict', 0.9, 0.3, 0.8) | ('predict', 0.9, 0.3, 0.9) | ('predict', 0.9, 0.3, 1.0) | ('predict', 0.9, 0.4, 0.3) | ('predict', 0.9, 0.4, 0.4) | ('predict', 0.9, 0.4, 0.5) | ('predict', 0.9, 0.4, 0.6) | ('predict', 0.9, 0.4, 0.7) | ('predict', 0.9, 0.4, 0.8) | ('predict', 0.9, 0.4, 0.9) | ('predict', 0.9, 0.4, 1.0) | ('predict', 0.9, 0.5, 0.3) | ('predict', 0.9, 0.5, 0.4) | ('predict', 0.9, 0.5, 0.5) | ('predict', 0.9, 0.5, 0.6) | ('predict', 0.9, 0.5, 0.7) | ('predict', 0.9, 0.5, 0.8) | ('predict', 0.9, 0.5, 0.9) | ('predict', 0.9, 0.5, 1.0) | ('predict', 0.9, 0.6, 0.3) | ('predict', 0.9, 0.6, 0.4) | ('predict', 0.9, 0.6, 0.5) | ('predict', 0.9, 0.6, 0.6) | ('predict', 0.9, 0.6, 0.7) | ('predict', 0.9, 0.6, 0.8) | ('predict', 0.9, 0.6, 0.9) | ('predict', 0.9, 0.6, 1.0) | ('predict', 0.9, 0.7, 0.3) | ('predict', 0.9, 0.7, 0.4) | ('predict', 0.9, 0.7, 0.5) | ('predict', 0.9, 0.7, 0.6) | ('predict', 0.9, 0.7, 0.7) | ('predict', 0.9, 0.7, 0.8) | ('predict', 0.9, 0.7, 0.9) | ('predict', 0.9, 0.7, 1.0) | ('predict', 0.9, 0.8, 0.3) | ('predict', 0.9, 0.8, 0.4) | ('predict', 0.9, 0.8, 0.5) | ('predict', 0.9, 0.8, 0.6) | ('predict', 0.9, 0.8, 0.7) | ('predict', 0.9, 0.8, 0.8) | ('predict', 0.9, 0.8, 0.9) | ('predict', 0.9, 0.8, 1.0) | ('predict', 0.9, 0.9, 0.3) | ('predict', 0.9, 0.9, 0.4) | ('predict', 0.9, 0.9, 0.5) | ('predict', 0.9, 0.9, 0.6) | ('predict', 0.9, 0.9, 0.7) | ('predict', 0.9, 0.9, 0.8) | ('predict', 0.9, 0.9, 0.9) | ('predict', 0.9, 0.9, 1.0) | ('predict', 0.9, 1.0, 0.3) | ('predict', 0.9, 1.0, 0.4) | ('predict', 0.9, 1.0, 0.5) | ('predict', 0.9, 1.0, 0.6) | ('predict', 0.9, 1.0, 0.7) | ('predict', 0.9, 1.0, 0.8) | ('predict', 0.9, 1.0, 0.9) | ('predict', 0.9, 1.0, 1.0) | ('predict', 1.0, 0.3, 0.3) | ('predict', 1.0, 0.3, 0.4) | ('predict', 1.0, 0.3, 0.5) | ('predict', 1.0, 0.3, 0.6) | ('predict', 1.0, 0.3, 0.7) | ('predict', 1.0, 0.3, 0.8) | ('predict', 1.0, 0.3, 0.9) | ('predict', 1.0, 0.3, 1.0) | ('predict', 1.0, 0.4, 0.3) | ('predict', 1.0, 0.4, 0.4) | ('predict', 1.0, 0.4, 0.5) | ('predict', 1.0, 0.4, 0.6) | ('predict', 1.0, 0.4, 0.7) | ('predict', 1.0, 0.4, 0.8) | ('predict', 1.0, 0.4, 0.9) | ('predict', 1.0, 0.4, 1.0) | ('predict', 1.0, 0.5, 0.3) | ('predict', 1.0, 0.5, 0.4) | ('predict', 1.0, 0.5, 0.5) | ('predict', 1.0, 0.5, 0.6) | ('predict', 1.0, 0.5, 0.7) | ('predict', 1.0, 0.5, 0.8) | ('predict', 1.0, 0.5, 0.9) | ('predict', 1.0, 0.5, 1.0) | ('predict', 1.0, 0.6, 0.3) | ('predict', 1.0, 0.6, 0.4) | ('predict', 1.0, 0.6, 0.5) | ('predict', 1.0, 0.6, 0.6) | ('predict', 1.0, 0.6, 0.7) | ('predict', 1.0, 0.6, 0.8) | ('predict', 1.0, 0.6, 0.9) | ('predict', 1.0, 0.6, 1.0) | ('predict', 1.0, 0.7, 0.3) | ('predict', 1.0, 0.7, 0.4) | ('predict', 1.0, 0.7, 0.5) | ('predict', 1.0, 0.7, 0.6) | ('predict', 1.0, 0.7, 0.7) | ('predict', 1.0, 0.7, 0.8) | ('predict', 1.0, 0.7, 0.9) | ('predict', 1.0, 0.7, 1.0) | ('predict', 1.0, 0.8, 0.3) | ('predict', 1.0, 0.8, 0.4) | ('predict', 1.0, 0.8, 0.5) | ('predict', 1.0, 0.8, 0.6) | ('predict', 1.0, 0.8, 0.7) | ('predict', 1.0, 0.8, 0.8) | ('predict', 1.0, 0.8, 0.9) | ('predict', 1.0, 0.8, 1.0) | ('predict', 1.0, 0.9, 0.3) | ('predict', 1.0, 0.9, 0.4) | ('predict', 1.0, 0.9, 0.5) | ('predict', 1.0, 0.9, 0.6) | ('predict', 1.0, 0.9, 0.7) | ('predict', 1.0, 0.9, 0.8) | ('predict', 1.0, 0.9, 0.9) | ('predict', 1.0, 0.9, 1.0) | ('predict', 1.0, 1.0, 0.3) | ('predict', 1.0, 1.0, 0.4) | ('predict', 1.0, 1.0, 0.5) | ('predict', 1.0, 1.0, 0.6) | ('predict', 1.0, 1.0, 0.7) | ('predict', 1.0, 1.0, 0.8) | ('predict', 1.0, 1.0, 0.9) | ('predict', 1.0, 1.0, 1.0) | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| Date | ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||
| 1991-01-31 00:00:00 | 54.000000 | 57.000000 | 49.374303 | 47.264380 | 45.166884 | 43.386641 | 45.364205 | 48.434222 | 48.268774 | 47.724016 | 47.924343 | 44.687365 | 41.629955 | 39.607014 | 40.545438 | 42.589284 | -15.499905 | -70.631540 | 46.942979 | 43.895680 | 41.791747 | 39.914158 | 39.071911 | 44.517648 | 24.555518 | 10.965358 | 47.559745 | 45.289807 | 43.543183 | 41.448652 | 39.216535 | 258.066107 | -310.815654 | 69.966854 | 48.868170 | 46.811637 | 44.769704 | 67.245430 | 4380.563274 | 4.914234 | -392.114116 | -515.803379 | 50.166518 | -11.857673 | -29.191581 | 577.588081 | -103.307971 | 89.605606 | 51.057755 | -68.707833 | 51.434956 | -80.745194 | -75.624415 | -158.906073 | -15.816953 | -949.077673 | 117.098471 | -140.628129 | 52.731926 | 41.109257 | 38.659111 | 215.712018 | 140.087554 | -144.326304 | 164.435706 | 115.235291 | 49.826593 | 47.622382 | 45.587677 | 42.224684 | 37.743394 | 34.892748 | 32.991351 | 31.090356 | 49.832113 | 47.735718 | 46.215189 | 43.759279 | 39.673111 | 35.433635 | 30.851516 | 71.207916 | 50.492419 | 48.504854 | 47.108422 | 44.691044 | 40.482243 | 4.183322 | 6.686671 | 0.588350 | 51.624438 | 49.602223 | 47.732295 | 45.156376 | 392.235042 | -283.435319 | 72.800202 | 63.912913 | 53.092456 | 51.129722 | 48.974560 | 75.221993 | 16.168149 | -4.270439 | -84.615469 | 124.810649 | 54.780445 | 53.145579 | 19.095954 | 40.584300 | 41.534231 | -3.158860 | 4157.365126 | -305.932458 | 56.604408 | 55.486215 | 55.354755 | 70.337332 | 47.860215 | 67.774680 | 38.788349 | 21.445753 | 58.495438 | 58.516506 | 60.766661 | 60.000272 | 45.201749 | 64.317484 | 114.615445 | 88.195190 | 51.284614 | 49.389434 | 49.227883 | 47.798751 | 44.107173 | 38.929461 | 32.386094 | 25.629239 | 51.921022 | 49.890849 | 49.224536 | 48.233590 | 45.378438 | 39.948721 | 64.590415 | 73.453282 | 53.164114 | 51.035835 | 50.068820 | 49.485022 | -7.874744 | -24.152415 | 569.795554 | 63.112615 | 54.804219 | 52.838568 | 52.393489 | 52.836884 | 30.832908 | 13.422598 | -20.132709 | -120.468834 | 56.519316 | 55.125624 | 55.393042 | 77.998821 | 39.266585 | 45.448767 | 31.875182 | 14.850727 | 57.981620 | 57.379473 | 57.259206 | 57.780225 | 92.819102 | 59.526488 | 32.295977 | 10.156399 | 58.962613 | 58.824165 | 56.898414 | 447.538835 | 125.386513 | -47.540748 | 55.511760 | -5.953940 | 59.628419 | 59.276872 | -3.046023 | -23.884395 | -1.667297 | 350.049352 | -12.274859 | 10.030094 | 52.906428 | 50.543657 | 50.638268 | 50.862881 | 49.350726 | 45.683901 | 38.545056 | -20.935661 | 54.047669 | 51.301919 | 50.964334 | 51.492333 | 51.195404 | 47.956439 | 12.875284 | -55.389950 | 55.616950 | 52.764489 | 52.735401 | 53.071266 | 65.052682 | 57.343258 | 34.860590 | 10.355360 | 57.184063 | 54.492211 | 54.607353 | 53.909350 | 85.803089 | 72.675177 | 47.968322 | 30.020839 | 58.467833 | 56.159043 | 67.715686 | 87.590206 | 5099.127986 | 278.961304 | 25.800925 | 46.460216 | 59.709354 | 58.055253 | 29.456403 | -4342.484167 | 377.742943 | -76.441161 | -31.648535 | -485.784082 | 61.952846 | -9.370513 | -188.636870 | 2.345356 | -271.647214 | 141.534897 | -1171.999406 | 22.218697 | 599.911606 | -127.372522 | 8.780296 | -1004.169271 | 56.482393 | 100.791934 | -204.903982 | -77.903843 | 54.711819 | 51.545004 | 52.032648 | 52.796747 | 52.408780 | 49.868483 | 44.060601 | 21.087187 | 56.328770 | 53.001072 | 53.061027 | 53.802439 | 34.963480 | 61.333561 | 58.033218 | 37.737288 | 58.235716 | 55.070654 | 55.548073 | 55.999169 | -25.715839 | -295.521784 | 86.375281 | 65.167486 | 60.198021 | 57.865006 | 59.345829 | 74.918005 | 30497.087589 | -126.555221 | 136.848872 | 142.724797 | 62.512965 | 62.088051 | -32.313532 | 119.590132 | 72.407622 | 52.293552 | -27.872790 | -138.636175 | 65.777665 | 63.244670 | -2.075548 | 9.009556 | 79.416372 | -71.393621 | 119.125459 | 25.448479 | 68.777158 | 785.428867 | 55.251837 | -22.688755 | 135.967437 | 18.448605 | -605.481473 | 101.581228 | 153.424570 | 85.386327 | -165.935725 | 203.423274 | -176.321298 | -214.949637 | 41.769156 | 683.363219 | 56.659972 | 53.956323 | 55.001786 | 56.843467 | 57.828078 | 55.064918 | 57.274317 | 47.518052 | 58.905262 | 56.841415 | 58.472100 | 61.516766 | 64.108683 | 73.856108 | 117.647652 | 55.072982 | 61.450941 | 60.811272 | 64.822069 | 69.919734 | 30.543762 | 20.895805 | -65.937691 | 66.233407 | 64.196300 | 66.544631 | 74.876805 | -81.929812 | -278.717408 | -19.203783 | 12.574238 | 248.203894 | 67.381004 | 54.349269 | 40.944430 | -286.418990 | -30.997134 | -6.107894 | -156.946574 | -11876.276955 | -87.262633 | -21.196171 | -28.350896 | -39.537493 | -49.812884 | -3.996846 | -99.921780 | -98.174661 | 42.756615 | 72.755520 | 104.149788 | 543.058192 | -13.747792 | -271.578066 | 9.635738 | 7.971223 | 405.688589 | -11.092479 | 18.946148 | 41.135528 | 96.318631 | 5.786243 | 24.178130 | -197.995114 | 57.952073 | 57.904474 | 60.828135 | 65.550279 | 69.840219 | 67.601971 | -14.767973 | 18.235626 | 60.589440 | 62.031241 | 67.259913 | 73.746488 | 18.936395 | 5.769639 | 27.038313 | -0.692336 | 63.030186 | 67.044303 | 75.747740 | 88.730910 | -0.383298 | 50.134987 | 5.780815 | -28.641349 | 65.059169 | 57.680420 | 99.437638 | -34.757849 | -3.832406 | -73.954503 | 2902.289963 | -25.286334 | 133.097555 | 52.359728 | 269.958500 | 32.795862 | -45.603959 | 7.803334 | -0.476290 | -3.256523 | -237.627467 | 40.258733 | 36.654005 | 146.696083 | -24.922639 | -108.158171 | 627.075786 | 7.363510 | 80.622680 | 39.686999 | 0.455226 | -1100.445031 | 55.302504 | 80.948593 | -78.665991 | 0.919675 | 136.002179 | 8724.900818 | -82.012700 | 4.871609 | -19.508783 | -63.626185 | 467.127767 | 19.360286 | 57.689301 | 61.238789 | 66.238740 | 79.101055 | 219.459077 | 90.461663 | 56.600299 | 27.708248 | 59.939502 | 192.183395 | -219.218071 | 54.229319 | -249.455548 | 48.589823 | 18.372717 | 28.946322 | 126.467688 | -22.509416 | -23.728774 | 278.664291 | 68.492039 | -4.299981 | 39.150738 | 41.889515 | 14.967769 | 51.119993 | 13.452607 | 121.398029 | -1103.621561 | 410.813604 | 2.070946 | 80.786140 | 2.722676 | 19.568311 | 149.005409 | -128515.401881 | -412.859523 | -368.132959 | -254.719317 | -67.470664 | 136.689713 | 300.674189 | 9.275272 | 135.982877 | -300.118390 | -465.834200 | 592.529451 | -632.128619 | 47.215950 | -21.354931 | 327.296922 | 233.032510 | 12.781217 | 25242.913161 | 378.087688 | -152.673923 | 24.194953 | -373.207339 | -148.421346 | -48.891077 | 49.295035 | 55.449316 | 116.817871 | -89.344744 |
| 1991-02-28 00:00:00 | 55.000000 | 64.000000 | 64.882414 | 63.199478 | 61.262130 | 59.727934 | 64.906967 | 76.318661 | 83.582884 | 133.526792 | 63.071127 | 59.389871 | 55.527447 | 53.165757 | 56.359798 | 64.689119 | -27.440690 | -89.299368 | 62.366772 | 58.957919 | 56.846727 | 55.244940 | 55.738803 | 99.959759 | 60.438896 | 34.098347 | 64.336686 | 62.260687 | 60.984596 | 59.493509 | 57.656898 | 78.155156 | 229.649570 | 221.155303 | 67.397499 | 65.792015 | 64.194539 | -439.308159 | 328.805661 | 15.321897 | -94.260176 | -147.689218 | 70.476443 | -14.441132 | 299.319719 | -190.716313 | -1.099287 | 6.277252 | 189.714784 | -35.613170 | 73.528596 | -76.529742 | -63.063671 | -168.554991 | -299.520854 | -196.568983 | 242.146842 | -61.748245 | 76.575502 | 83.364407 | 89.318429 | 376.712606 | 305.730848 | 105.542299 | 297.473987 | 701.564587 | 66.935107 | 65.460351 | 64.582295 | 62.258305 | 56.742815 | 54.189565 | 53.964856 | 53.783771 | 67.809932 | 66.464133 | 66.572896 | 66.015424 | 61.878774 | 57.014563 | 50.544693 | 242.131353 | 69.916370 | 68.691293 | 69.042156 | 68.719487 | 64.968373 | -11.938853 | 127.050443 | 36.160392 | 72.952705 | 71.686012 | 71.308308 | 71.177080 | -149.709478 | 59.583680 | -300.904840 | 189.971983 | 76.592582 | 75.583011 | 75.280761 | 182.424051 | 68.357196 | 124.684001 | -53.902635 | 877.867949 | 80.469276 | 80.301172 | 50.355428 | 86.093296 | 75.481609 | -129.275724 | 1833.350885 | -105.103626 | 84.255347 | 85.328231 | 91.383778 | 167.155143 | 108.663360 | 172.465603 | 84.073676 | 94.407005 | 87.589369 | 91.248492 | 103.908353 | 119.080296 | 92.866325 | 179.349645 | 276.571047 | 298.362256 | 70.436403 | 69.285579 | 72.081987 | 74.938823 | 73.815759 | 69.644008 | 61.524902 | 50.634977 | 72.685811 | 71.090629 | 72.853237 | 76.870053 | 78.507908 | 75.265431 | 54.174135 | 217.190568 | 76.186360 | 74.203359 | 75.704713 | 81.273061 | 0.101907 | -19.296801 | 294.598041 | 228.593566 | 80.356156 | 78.342218 | 81.775986 | 90.845008 | 58.654023 | 47.217190 | -11.344040 | -65.856648 | 84.318092 | 82.879597 | 89.154904 | 164.161966 | 74.719478 | 111.098183 | 66.441654 | 47.037657 | 87.206463 | 87.137936 | 94.495938 | 108.385897 | 287.199897 | 206.655397 | 100.015049 | 24.364277 | 88.641871 | 90.734230 | 96.143689 | -425.451961 | 89.570741 | -376.269211 | -1501.210145 | -9.107744 | 89.725669 | 94.078627 | 565.067996 | 0.878110 | 189.211310 | 805.471851 | -333.452580 | 64.627364 | 74.502783 | 71.931380 | 75.117942 | 82.714814 | 89.148602 | 93.374747 | 89.580459 | -98.919643 | 78.139717 | 74.414272 | 76.926596 | 85.902303 | 97.293712 | 106.383812 | 30.426577 | -85.360799 | 82.688026 | 78.129539 | 82.154498 | 92.292782 | 141.286611 | 145.581913 | 94.498297 | 39.939416 | 87.031788 | 82.061838 | 88.365482 | 98.454691 | 102.740296 | 242.725807 | 159.864869 | 111.023530 | 90.456381 | 86.443848 | 113.402430 | 204.186350 | -542.316953 | 2316.334713 | 61.875712 | 254.639947 | 93.718335 | 93.138516 | 24.072944 | 14088.912238 | -375.189700 | 30.050155 | -410.597269 | -222.608691 | 99.882605 | 3.518237 | 14412.253469 | 120.965515 | -191.647241 | -362.956716 | 1312.309206 | 137.776432 | 1892.154794 | -1105.664383 | 0.969734 | 864.760046 | 117.840411 | 44.669190 | -330.493275 | -113.532027 | 79.269117 | 74.517488 | 79.074612 | 89.527031 | 102.628481 | 116.652785 | 123.458153 | 55.864891 | 84.441153 | 79.057861 | 83.380803 | 96.022398 | 922.695080 | 118.927712 | 195.521303 | 132.593618 | 90.270959 | 84.857107 | 92.386216 | 109.101710 | -22.016186 | -149.443815 | 496.110496 | 337.332853 | 95.942481 | 92.791910 | 107.749833 | 189.260695 | -7.612345 | -2.243913 | -364.468235 | -6904.997760 | 102.084761 | 105.976043 | -49.221748 | 71.518365 | 110.275841 | 85.889069 | -18.201786 | -203.645608 | 110.594482 | 76.709282 | 67.480715 | 68.553088 | 117.271260 | -181.184045 | -990.277111 | 287.643572 | 90.630464 | 2167.855628 | 854.600288 | 4.238598 | 632.917143 | -50.071455 | -403.013229 | -1175.919654 | 317.710199 | 128.649154 | 215.394000 | -67.202050 | 3622.802935 | -804.081007 | -1388.194860 | -365.420262 | 84.378711 | 80.127883 | 87.254561 | 103.899132 | 130.634846 | 159.011982 | 229.925466 | 215.684540 | 91.319112 | 88.704647 | 99.431495 | 127.025620 | 174.020479 | 273.637572 | -72171.844913 | 213.386156 | 98.745039 | 99.906420 | 123.678805 | 177.856105 | 4.821004 | 4.084262 | 1.890486 | 176.000092 | 106.028497 | 117.547039 | 176.255551 | -163.988381 | 412.017602 | 114.000744 | -335.168175 | -698.645904 | 114.047704 | 277.078285 | 67.286154 | -157.301657 | 55.070501 | -487.350563 | -167.145706 | -479.048987 | 44.829378 | 12.738682 | -78.906244 | -27.981718 | -487.180861 | -68.273164 | -374.907807 | -249.484283 | 251.767752 | 66.119869 | 241.022412 | -93.962204 | -29.822330 | -30.687035 | -263.598083 | 235.594273 | -3038.938122 | -13.578862 | -2.546209 | 33.692459 | 173.629974 | -64.890319 | 121.625170 | -330.983663 | 87.441097 | 88.752927 | 102.289247 | 133.642697 | 196.863430 | 259.696195 | 6.705750 | 79.229327 | 95.192288 | 100.542282 | 125.614839 | 180.699169 | -27.491918 | -4.763233 | 48.701939 | -31.618415 | 101.946534 | 115.471984 | 169.547978 | -428.655359 | -23.428709 | 65.869078 | -26.402198 | -439.018531 | 107.188225 | 39.169265 | 617.572091 | -36.428394 | -98.076973 | -320.169949 | -11577.529360 | -3943.063007 | 599.937908 | 24.528402 | 1695.545931 | 28.581752 | -54.583786 | 56.529052 | 115.182897 | -4.009898 | 31.337700 | 104.265313 | 28.181403 | 43.277833 | 44.792055 | -379.234291 | -1540.392017 | -50.521024 | 286.092678 | -18.371388 | -0.240892 | -1105.685372 | 291.303822 | 459.997011 | -477.064547 | -10.189051 | 2396.195411 | -418282.450332 | -1625.742895 | 0.078150 | 20.270060 | -143.931974 | 1049.241434 | -76.822126 | 85.997131 | 94.757724 | 114.133181 | 246.447299 | -160.602740 | 361.624699 | 131.244592 | -533.934803 | 92.619779 | -332.730280 | -27.893262 | -97.256658 | -279.115552 | 40.722108 | -124.580017 | -172.438273 | -425.666205 | 171.165814 | 56.086646 | 144.398262 | 113.959392 | 29.518561 | 636.481949 | -146.956310 | -3.378446 | -111.621583 | 2.972401 | 121.151178 | -7277.385282 | 738.974009 | -185.889698 | -175.080047 | 10.103223 | 0.942727 | 2584.578673 | -1238.022428 | -528.544676 | 391.255180 | -18610.353843 | 311.799193 | -2748.712565 | 127.029931 | 15.454605 | 113.365640 | 1753.299942 | 2530.025918 | 16873.339551 | -351.288690 | 168.367246 | 16000.801036 | 3331.294211 | 1109.801920 | -70.675901 | -3623.617179 | 449.107197 | -717.076743 | -269.139810 | 79.735632 | -206.176791 | -1.460677 | -202.178195 | 1430.417952 | 1421.173769 | 2252.543325 |
| 1991-03-31 00:00:00 | 66.000000 | 70.000000 | 69.028511 | 66.330022 | 62.858947 | 58.643347 | 60.022435 | 72.821764 | 80.034044 | 463.880170 | 67.009384 | 61.534071 | 55.446052 | 50.490835 | 51.284240 | 60.362184 | -28.690296 | -63.736336 | 66.689876 | 61.582912 | 58.190709 | 55.363869 | 54.484411 | 180.018369 | 92.576735 | 51.629479 | 69.877313 | 66.612716 | 64.800570 | 63.064371 | 59.824006 | -1154.109358 | -1332.127933 | 1044.283686 | 74.636953 | 72.243816 | 70.471482 | -70.244894 | -266.054733 | 3.614085 | -53.848392 | -74.237835 | 79.784860 | -14.851083 | 133.932853 | -116.531449 | 293.314242 | 498.423732 | 264.223175 | -7.952435 | 85.263353 | -60.153401 | -53.781851 | -185.085356 | -268.262690 | -134.530879 | -313.703081 | -22.590492 | 91.011276 | 136.685394 | 143.385625 | 443.746670 | 345.889182 | 114.384371 | -938.123729 | -564.318595 | 71.999883 | 69.686091 | 68.480829 | 66.312138 | 57.383685 | 50.847141 | 46.180167 | 39.833192 | 73.846091 | 71.616751 | 71.886286 | 72.344767 | 66.249522 | 57.511801 | 45.165433 | 2041.876517 | 77.642082 | 75.482108 | 76.125992 | 77.257427 | 73.033686 | -104.321995 | 211.319381 | 60.920957 | 83.133314 | 80.885596 | 80.697392 | 83.237115 | -85.378975 | -281.686076 | 416.026099 | 430.565330 | 89.813199 | 87.948119 | 88.686941 | -21.655161 | 158.164534 | 70.408976 | -22.680536 | -473.092098 | 96.925030 | 96.213526 | -52.355192 | 146.106076 | 106.153869 | -247.078374 | 924.461544 | -40.939808 | 103.610483 | 104.249461 | 117.940368 | 351.938758 | 212.876843 | 315.361457 | 98.488223 | 125.894817 | 108.687741 | 112.091336 | 136.492525 | 199.489080 | 248.192530 | 454.997475 | 327.364345 | 283.742179 | 76.824884 | 74.028627 | 76.464145 | 81.460261 | 80.141479 | 72.934751 | 58.990509 | 40.318021 | 81.090996 | 77.176199 | 77.463930 | 83.664996 | 87.146445 | 84.069477 | 401.650600 | 1077.219745 | 87.579387 | 82.402768 | 81.747624 | 89.956069 | 135.942607 | -6.042955 | 144.850101 | 14247.846518 | 95.228323 | 88.641460 | 90.157585 | 102.256935 | 82.159667 | 78.849199 | 1.411484 | -23.147947 | 102.235820 | 93.912026 | 98.315089 | 277.168010 | 125.216576 | 219.333933 | 93.433923 | 48.784213 | 106.585020 | 96.918080 | 101.887356 | 117.219904 | -28829.646596 | -118.789970 | 733.720745 | 28.872211 | 107.252537 | 99.251805 | 103.892102 | -262.044170 | -298.096413 | -773.242895 | -355.470381 | -12.768697 | 106.711873 | 104.558708 | -192.292921 | -0.773746 | 74.288122 | 667.427229 | 1469.704279 | -598.952875 | 82.585714 | 75.974691 | 75.158839 | 82.931890 | 88.410868 | 90.046896 | 80.544639 | -57.889003 | 89.447695 | 79.899750 | 76.366003 | 83.125400 | 91.849645 | 99.612324 | 28.659739 | -42.252592 | 98.011488 | 85.074956 | 81.608868 | 87.026269 | 179.424996 | 210.083845 | 134.611694 | 43.303625 | 106.059513 | 89.094532 | 87.715226 | 92.552262 | 137.049117 | 713.957791 | 626.280593 | 187.543784 | 111.761710 | 92.998551 | 49.474214 | 293.437473 | 61.535861 | -279.856682 | -54.214570 | -2370.368507 | 115.902017 | 101.249587 | -45.518654 | 3686.878618 | 656.397064 | 308.155284 | -509.393441 | -198.132514 | 123.616712 | -86.469955 | -153.558210 | 25.299118 | 571.594081 | -78.157063 | 708.813411 | -264.072262 | 870.306497 | -4487.898262 | -10.316770 | 474.052022 | -0.491483 | -1131.165228 | -214.718920 | -252.424352 | 89.314584 | 77.286941 | 73.922180 | 78.179609 | 82.837492 | 89.026231 | 82.996183 | 32.061478 | 98.973421 | 84.079422 | 77.598191 | 80.866076 | -101.131403 | 211.881815 | 230.249546 | 122.692968 | 109.717952 | 91.407542 | 86.664598 | 93.738594 | -10.185318 | -24.690523 | -3932.844588 | 692.703209 | 119.093012 | 99.667382 | 102.668910 | 340.258392 | -22.225069 | -4.224986 | -315.900352 | -610.213703 | 126.690841 | 112.415849 | -50.237200 | -106.973719 | -51.619921 | -122.966459 | 44.812068 | -227.891612 | 134.469981 | 75.734916 | 113.043968 | -18.717158 | 199.355634 | -215.173312 | 324.765458 | -1141.661878 | 102.254769 | 4009.577321 | 1252.552766 | 1.069772 | 43.733087 | -683.007447 | 19.100043 | -559.008639 | 788.289751 | 107.453618 | -606.379312 | -209.007427 | 12.788123 | -218.389566 | -1313.884851 | -790.222699 | 96.086928 | 81.798253 | 77.156562 | 78.853073 | 84.296123 | 96.534273 | 190.640983 | 64.931423 | 108.297038 | 93.473219 | 87.969639 | 93.997570 | 136.733552 | 466.550678 | -185.375992 | 1380.130258 | 120.314012 | 104.903582 | 107.009301 | 134.316355 | -37.924388 | -2.993446 | 1.265409 | -49.442766 | 129.078293 | 118.150141 | 144.816482 | -7.480252 | 434.189700 | 18.602519 | 70.097804 | 207.293065 | 134.281447 | 567.809564 | 4.085072 | -51.277423 | -98.053290 | 5.037102 | 56.255465 | 334.126406 | 325.653958 | 0.619187 | 13.599512 | -3.316159 | -22.260095 | -12.412384 | 4.351880 | 137.626979 | 254.882630 | -50.293081 | 281.356177 | -155.196710 | 18.085908 | 656.685259 | 8.286457 | -52.887332 | 4595.918743 | 7.748252 | -1.809185 | -6.967651 | -99.949735 | -0.075801 | -15.194105 | 1117.807331 | 99.127321 | 89.383542 | 85.995800 | 86.404550 | 93.221262 | 97.882175 | 633.644194 | -51.007064 | 111.944383 | 102.436586 | 102.044719 | 111.219293 | 29.090355 | -16.724218 | -12.298323 | 10.156428 | 121.587506 | 113.642562 | 127.349582 | -55.140668 | -6.201487 | -21.952917 | -2.471519 | 37.449093 | 126.257431 | 8.421633 | 1028.194243 | -17.841942 | 37.838133 | -336.990738 | -4.851052 | 28.960666 | 789.794729 | -43.670519 | 120.301098 | -12.574762 | -53.657518 | 4.017084 | 111.506100 | -3.429339 | 1039.292899 | 245.142212 | 68.685289 | -2.168760 | 6.828384 | -108.297319 | 802.311954 | -18.776555 | -454.735352 | -21.478601 | 2.802738 | -972.529891 | -18.563569 | 26.060348 | -55.303165 | -9.090868 | -484.040978 | -2622.658935 | 386.121761 | 0.837259 | 10.229960 | -1.032455 | 755.298599 | -161.269716 | 96.692142 | 94.618153 | 91.360196 | -2308.962107 | -9.025587 | -323.684691 | 0.597288 | -41.675314 | 108.741054 | -76.104814 | -7.369698 | -42.509611 | -97.557983 | 1.597164 | -86.930323 | -39.748887 | -1464.217331 | 290.677945 | 93.833113 | -82.069165 | 54.474228 | 30.671816 | 826.720629 | -65.847151 | -0.857244 | -35.134533 | -6.097657 | -140.790290 | 804.501662 | 1203.861359 | -213.600950 | -167.231528 | -5.790429 | -7.233806 | -181.663257 | 183.111197 | -47.449244 | 32.447189 | -237.990486 | -258.352979 | -183.446795 | -33.529081 | 227.106282 | -31.720614 | 3.278376 | 339.994911 | 8393.002347 | 506.727913 | -2657.082838 | 157.172103 | -586.054350 | 5156.663865 | -370.447634 | -36080.937203 | -4432.336115 | 287.404490 | -2401.279835 | 1323.784927 | 76.088781 | 12.568191 | 147.079960 | -471.212511 | -2902.971451 | 245.962823 |
| 1991-04-30 00:00:00 | 65.000000 | 61.000000 | 65.872790 | 62.996052 | 58.463784 | 52.770758 | 52.379957 | 68.714694 | 83.243314 | -1114.326719 | 64.210886 | 58.154916 | 50.513246 | 43.825521 | 43.403181 | 55.302900 | -34.415800 | -69.193080 | 64.655456 | 58.734386 | 53.873056 | 49.971005 | 48.854507 | 349.522792 | 240.915561 | 90.265543 | 68.780996 | 64.575014 | 61.438187 | 59.363928 | 56.399016 | -189.438813 | -326.688895 | -830.564975 | 74.546556 | 71.184280 | 68.188517 | 234.175584 | 368.707831 | 25.457568 | -37.198468 | -61.986627 | 80.934355 | -15.371163 | 124.191791 | 420.924792 | -467.527398 | 1023.398744 | 358.237995 | 8.339412 | 87.927643 | -41.065759 | -55.855119 | -204.221354 | -248.588771 | 279.101240 | 1732.828689 | -7.506280 | 95.364288 | 215.443358 | 224.722604 | 613.713177 | 386.531595 | 164.225408 | -351.333956 | -311.673993 | 69.416125 | 66.890657 | 65.152941 | 63.894853 | 54.564748 | 48.576754 | 46.194837 | 43.259617 | 72.063808 | 69.237129 | 68.709731 | 70.131190 | 64.913414 | 57.727430 | 46.666199 | -516.719549 | 76.974101 | 73.717760 | 72.996905 | 75.131547 | 73.491688 | -709.462336 | 209.730429 | 114.098639 | 84.081111 | 80.243585 | 77.994913 | 82.338624 | -76.108750 | -150.418543 | -1473.684760 | 7781.500229 | 92.859728 | 88.928514 | 87.422435 | -880.956991 | 162.908663 | 74.524524 | -6.484657 | -291.506505 | 102.282020 | 98.845091 | -214.905796 | 228.199216 | 147.596146 | -397.487599 | -93.397519 | -24.135072 | 111.024889 | 107.391174 | 120.340638 | 733.877652 | 412.910415 | 601.384414 | 148.365363 | 213.940092 | 117.060352 | 113.669293 | 135.915835 | 279.995675 | -549.010212 | 487.023066 | -312.275049 | 410.112632 | 74.866093 | 71.011814 | 71.758152 | 78.625395 | 81.011794 | 79.013099 | 70.126627 | 52.460873 | 80.682569 | 74.898380 | 72.267758 | 79.728957 | 88.843922 | 96.316494 | -579.538285 | -1095.088728 | 89.583669 | 81.559668 | 76.775689 | 86.033068 | 36.499849 | 2.490026 | 136.001314 | -442.594893 | 100.364953 | 89.389647 | 85.575441 | 98.500735 | 105.380411 | 119.097158 | 12.906344 | -16.955022 | 110.595889 | 95.042308 | 92.704766 | 403.420215 | 203.997029 | 441.239426 | 168.142290 | 91.989654 | 117.114108 | 96.374022 | 93.830738 | 109.324191 | -381.759108 | -128.913884 | -1739.232881 | 81.761406 | 117.780950 | 96.426616 | 94.946625 | 7.404463 | 41.602428 | -899.167337 | -6.752040 | 32.687196 | 115.611983 | 100.985853 | -116.524115 | -104.606859 | -26.841846 | -333.285036 | 852.647672 | -154.045183 | 82.228720 | 73.592501 | 69.221332 | 78.007574 | 88.836508 | 101.437318 | 105.788452 | -90.033478 | 92.194732 | 79.442039 | 70.872041 | 76.970114 | 90.868420 | 114.213674 | 39.274411 | -56.885464 | 105.163738 | 87.077133 | 76.704754 | 80.249801 | 222.549782 | 316.140351 | 243.809935 | 94.303626 | 118.144849 | 92.380039 | 82.679285 | 85.695059 | 164.988435 | -4974.831311 | -1762.301163 | 1247.424473 | 127.721947 | 95.902677 | 37.891449 | 417.234635 | -484.373124 | -91.859279 | -19.678741 | -520.273606 | 133.394131 | 103.305045 | -66.830577 | 13409.417621 | -20812.568505 | -395.467189 | -206.313624 | -951.191414 | 140.817510 | -141.545719 | -232.840650 | 85.048745 | 571.136243 | -63474.037103 | 64397.176438 | -195.026524 | -1038.075008 | -3746.481672 | -20.527607 | -690.544107 | -37.897241 | 147.611694 | 726.137826 | 524.665062 | 91.785976 | 77.124957 | 69.504337 | 73.808309 | 83.125775 | 103.479315 | 119.081223 | 56.284117 | 106.680691 | 87.746496 | 75.054403 | 76.831019 | 174.109502 | 125.439833 | 411.646414 | 330.134402 | 124.067938 | 98.398933 | 85.534338 | 91.595652 | -8.532647 | 1170.477384 | -1664.251010 | -2947.363951 | 139.332737 | 107.840609 | 101.989793 | 339.364998 | -24.523970 | 203.147956 | -290.838825 | -748.760549 | 149.561363 | 119.736569 | -75.685833 | -153.204181 | 142.372493 | 68.351087 | 2.460172 | -826.231479 | 156.354265 | 76.120804 | 243.130184 | -23.875541 | -429.795628 | 303.648791 | -504.908419 | -766.942406 | 112.371048 | 5179.691533 | 3357.929380 | 8.272146 | 50.462056 | 2.894512 | 116.772371 | -1440.936257 | -1741.751575 | 45.967304 | 318.729461 | -120.328774 | -1121.558260 | 1980.343612 | -485.683929 | 11720.234750 | 102.148570 | 84.906910 | 77.121329 | 79.249986 | 90.919312 | 129.363606 | 218.540216 | 148.521769 | 121.529305 | 102.529676 | 91.442879 | 98.337896 | 181.420515 | -29640.919917 | 436.964466 | 31583.778825 | 141.233892 | 117.826459 | 113.325387 | 152.058562 | -35.693395 | -692.155733 | -204.047452 | -191.119513 | 154.902551 | 132.086998 | 156.262841 | 0.458755 | 327.179514 | -19.285506 | -358.139859 | -6401.661920 | 160.978426 | 630.710549 | -22.760755 | 69.408924 | -137.894830 | 439.945207 | -125.086519 | -1778.926313 | 199.783709 | -22.657745 | 117.003990 | 2.317060 | 20.549308 | -39.944383 | -66.628169 | -750.343078 | 350.019432 | -88.338751 | 155.462405 | -157.265218 | 5.365507 | 621.937134 | 12.982243 | 266.324384 | 1735.669650 | 270.620859 | 15.235241 | -34.310339 | -1046.699341 | 20.311230 | -44.599851 | 786.351362 | 108.042847 | 97.513363 | 93.352603 | 97.561244 | 117.588438 | 156.647169 | 103.901692 | 83.598722 | 129.254576 | 117.885116 | 115.390348 | 135.966982 | 16.044362 | 9.138270 | -4.926068 | -41.583578 | 146.698390 | 134.127632 | 145.264974 | -30.930889 | 4.413649 | -51.966108 | 49.140197 | -104.277803 | 156.042245 | 34.807447 | -374.391001 | -5.294676 | 52.795617 | -472.319744 | 38.848762 | -56.559915 | -3525.932026 | -67.290591 | 181.702971 | -23.194624 | -119.729796 | -33.682024 | 336.908717 | 7.971002 | 5568.351956 | -9768.395597 | -71.848967 | -1.926828 | -6.095702 | -32.226071 | -11.289816 | 21.996665 | -177.604835 | 54.243173 | 10.569448 | 122.273653 | 1.164949 | -167.423232 | 50.155085 | 7.585611 | 84.666862 | 4105.587565 | 1486.481442 | -0.671106 | -0.540521 | -4.496621 | -261.430870 | 62.847384 | 106.987243 | 109.445427 | 107.994375 | -146.749182 | -43.229508 | -203.078058 | -27.605721 | 51.986535 | 128.320362 | -1.189787 | -0.041610 | -12.629944 | -39.424643 | -0.760534 | 80.822768 | 45.479855 | 521.362114 | -11753.061200 | 66.299207 | -385.991736 | 49.766686 | 25.107929 | 1534.788849 | 57.595466 | -13.907731 | -1061.388849 | -13.219931 | 107597.299513 | 737.400473 | 3945.383882 | -104.031711 | 90.376159 | -11.122695 | -4.087399 | 9764.262497 | -547.400091 | -96.049963 | -3.772974 | 65.418392 | 622.361366 | 118.743265 | 4808.348670 | -7.814730 | -42.208290 | 88.477334 | -319.997924 | -18637.511637 | 316.504910 | -197.668720 | -60.767858 | 4209.590285 | 176.015854 | 645.045577 | 29157.922603 | -527.756459 | 1139.068058 | 958.761852 | 2780.171143 | 504.647939 | -97.362119 | 7.151031 | 447.534299 | -30.498716 | -1019.135910 |
| 1991-05-31 00:00:00 | 60.000000 | 68.000000 | 63.929158 | 58.902567 | 51.848795 | 43.218257 | 38.101884 | 48.920354 | 60.422387 | -274.501899 | 62.407749 | 54.221856 | 44.093163 | 34.653626 | 30.648733 | 38.682662 | -36.925839 | -64.468349 | 63.374366 | 55.190713 | 47.582887 | 40.879478 | 36.731042 | -2378.652607 | 157.059681 | 170.444590 | 68.072894 | 61.290630 | 55.026709 | 50.039713 | 44.229378 | -372.008864 | -55.287693 | -310.825775 | 74.416613 | 68.225598 | 61.738328 | -285.629080 | -668.829653 | -142.817754 | -42.024128 | -51.656803 | 81.653120 | -17.525373 | 136.877064 | -375.983375 | -231.183802 | -419.714392 | -2941.967254 | 25.205272 | 89.844827 | -20.254089 | -69.887136 | -246.263736 | -240.883694 | -369.184804 | -479.853849 | 6.779177 | 98.729828 | 381.954707 | 409.239398 | 1184.273395 | 663.673162 | -83.843144 | -204.933925 | -199.090605 | 67.575806 | 63.276111 | 59.165749 | 56.070189 | 43.539457 | 34.244634 | 29.429899 | 26.396612 | 70.721266 | 65.828818 | 62.176282 | 60.295996 | 50.947490 | 40.436493 | 29.202016 | -330.372966 | 76.442913 | 70.586230 | 65.447060 | 62.505340 | 55.541567 | 690.718200 | -1259.283894 | 387.030769 | 84.902323 | 77.832251 | 69.549042 | 66.744043 | -66.555374 | -123.264361 | -231.848662 | -357.704182 | 95.618510 | 87.678608 | 78.047936 | -2306.070712 | 157.719126 | 84.220518 | 2.536018 | -151.161235 | 107.347118 | 98.721073 | -706.866126 | 401.901036 | 240.769458 | -657.778076 | -1156.315180 | -5.417430 | 118.319498 | 107.226095 | 101.873415 | 2177.775994 | 1333.701726 | 1603.912046 | 864.402776 | 195.771991 | 125.677413 | 111.226338 | 103.579180 | 130.172588 | 5473.612591 | -1080.448460 | 506.613751 | 377.697646 | 73.423949 | 67.467669 | 63.552793 | 65.167567 | 60.579796 | 50.981016 | 38.026521 | 23.752039 | 80.578613 | 72.144707 | 63.412308 | 62.532317 | 59.775202 | 52.816842 | -118.310996 | -145.579158 | 91.827902 | 80.509809 | 67.713591 | 64.702103 | 8.618757 | 7.219074 | 79.729912 | -166.451344 | 106.064122 | 90.774128 | 75.963191 | 70.664697 | 118.245346 | 134.011812 | 18.737597 | -5.949877 | 120.537134 | 98.387273 | 81.434193 | 406.228439 | 187.346910 | 566.573909 | 245.389875 | 122.009214 | 131.112197 | 99.929518 | 80.521758 | 69.844958 | 3536.867787 | -258.909980 | -2666.803745 | -164.683391 | 134.256895 | 99.183808 | 79.986499 | 241.534867 | -222.447439 | 957.699725 | 7.516809 | 3.257556 | 132.984793 | 103.411359 | -614.280607 | -115.218685 | 775.173254 | -3873.540357 | 681.791760 | -114.342796 | 82.271030 | 71.521454 | 60.793064 | 60.436414 | 57.097215 | 50.247315 | 39.859498 | -35.001515 | 95.454112 | 80.198711 | 63.792206 | 57.787454 | 52.541981 | 46.826320 | 21.685264 | -27.921552 | 113.740248 | 92.280371 | 71.079293 | 59.347697 | 187.925073 | 204.879006 | 146.415044 | 75.789074 | 134.151450 | 101.826518 | 77.418561 | 61.347224 | -18.944324 | 361.275520 | 596.078654 | 9299.395931 | 152.056056 | 107.761232 | 36.977158 | 393.229457 | 229.829255 | 710.445773 | 153.193511 | -578.875169 | 164.829087 | 116.260717 | -184.641414 | 44969.783063 | 1285.107335 | 113.885934 | 3744.823330 | 179.161868 | 177.419203 | -74.211318 | -216.736439 | 45.272791 | 3452.122800 | 861.753497 | -1786.679601 | -165.185740 | -934.536236 | -64.288788 | -33.251458 | 23423.531342 | -145.151237 | -76.382309 | 119.804987 | 91.901724 | 94.641362 | 78.305246 | 63.383166 | 57.124268 | 49.528007 | 43.493782 | 37.534998 | 16.217110 | 115.869325 | 95.122792 | 72.461404 | 59.687480 | -166.406408 | -31847.644400 | 111.259796 | 87.569198 | 143.349796 | 113.221615 | 85.383419 | 69.488951 | -5.411668 | 3.041709 | 133.262013 | 236.285327 | 171.230169 | 128.248990 | 100.645208 | -242.527210 | -123.098667 | 21.457095 | 264.139015 | 221.587894 | 192.442168 | 142.623256 | -176.535100 | -113.411367 | 239.338400 | 22.121736 | 29.734054 | 54.543734 | 205.386877 | 93.966839 | 1341.008107 | -17.138095 | -95.534710 | 32.262906 | 88.094539 | -101.069486 | 145.494055 | 3708.351854 | -7799.960174 | 0.321158 | 21.471738 | -6.366522 | 130.864928 | -233.205217 | 183.164680 | -65.385296 | 1409.301875 | 182.015389 | -582.735559 | 1179.767981 | -529.476009 | -243.509848 | 108.819010 | 89.857004 | 74.851635 | 64.036838 | 52.943494 | 45.218004 | 54.379902 | 27.856130 | 138.099841 | 117.501107 | 94.076419 | 79.785671 | 107.547201 | 875.578319 | 54.210005 | 30.138996 | 171.852712 | 142.135477 | 117.138566 | 112.729923 | -3389.844879 | 14.214302 | -11.023608 | 4.071822 | 199.164346 | 161.442242 | 149.623861 | 10.969776 | 1.300286 | -191.802795 | -1015.091073 | -38.027904 | 212.950978 | 344.794002 | -1.363899 | 326.811970 | 246.080544 | -468.080473 | -1233.859473 | -59.379735 | 77.471566 | -44.818156 | 174.377647 | 4.658893 | -19.170924 | 17.140615 | 34.641301 | -33.977464 | 2320.916484 | -230.334310 | 187.391746 | 677.937095 | 12.646661 | 334.739760 | 2.545559 | 27.896025 | 4848.188650 | 268.124707 | 16.679181 | 114.445059 | -391.967874 | -4.369702 | 13.537991 | -9887.124902 | 117.716814 | 106.139299 | 94.156570 | 80.358265 | 64.047507 | 48.769805 | 70.546645 | 4.630623 | 150.447139 | 137.955731 | 119.794987 | 105.712122 | -9.649902 | -3.901529 | 102.226170 | -3.470657 | 181.222757 | 162.854747 | 131.902461 | 9.398304 | 0.338891 | -27.705164 | 18.493272 | -12.990514 | 199.301091 | 65.534470 | 276.304906 | -2.256880 | -14.630115 | 54.483532 | 41771.223719 | -11.836115 | 199.325158 | -47.247092 | 351.278832 | 6.273399 | -166.131852 | -24.245054 | -59.804938 | 4.091998 | -1020.437391 | -2009.595362 | 47.693527 | 1.280560 | -11.946583 | 31.647276 | -66.691455 | 31.517316 | 174.474866 | 2496.642956 | -30.318795 | 1083.642757 | 113.779844 | 188.956704 | 11.940013 | -11.775513 | -1482.608142 | 9403.898619 | 1241.758948 | -3.183105 | -2.254066 | 234.722843 | -153.094762 | -30.037641 | 115.899082 | 118.561793 | 103.789197 | 162.792897 | -13.156731 | -452.587650 | -20.108612 | 15.479852 | 146.755172 | 28.645375 | 0.577060 | 10.863883 | 31.227165 | 0.431176 | 21.531352 | 26.714216 | -829.763120 | -120.801117 | -170.756450 | 20.570106 | -9.231606 | -1.835699 | -1034.244418 | 202.527173 | 94.997021 | 94.312580 | -14.517651 | -348.539564 | -882.201380 | 183.056192 | -75.591304 | -83.570364 | -4.331120 | -17.273689 | -11842.312380 | -1122.566821 | -140.388450 | -20.853267 | 9.553022 | 375.660649 | -484.872488 | 162.612133 | -344.258570 | -38.412503 | -199.971187 | -179.503748 | -52738.490846 | -55.661386 | 36.249525 | -265.743222 | 3032.617463 | -19238.684595 | 173.157023 | -8095001.798788 | 83.373492 | -58.026894 | -119.764301 | -6983.104886 | -615.721568 | -45.560563 | 28.656857 | 24.508461 | 19.470596 | -65.859974 |
| Alpha Values | Beta Values | Gamma Values | Train RMSE | Test RMSE | |
|---|---|---|---|---|---|
| 1 | 0.3 | 0.3 | 0.4 | 24.209084 | 10.169599 |
| 17 | 0.3 | 0.5 | 0.4 | 25.873405 | 10.375164 |
| 26 | 0.3 | 0.6 | 0.5 | 28.141788 | 10.777034 |
| 84 | 0.4 | 0.5 | 0.7 | 31.193940 | 13.368421 |
| 76 | 0.4 | 0.4 | 0.7 | 29.933604 | 17.231419 |
| Test RMSE | |
|---|---|
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| RegressionOnTime | 15.268955 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| SimpleAverageModel | 53.460570 |
| NaiveModel | 79.718773 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
# 'smoothing_level': 0.09942398570261507,
# 'smoothing_trend': 3.5015167744954147e-06,
# 'smoothing_seasonal': 0.0003177759456525393,
plt.figure(figsize=(18,9))
plt.plot(train["Rose"], label="Train")
plt.plot(test["Rose"], label="Test")
plt.plot(SES_test["predict"], label="Alpha =0.03 Simple Exponential Smoothing predictions on Test Set")
plt.plot(DES_test["predict", 0.3, 0.4], label="Alpha=0.3,Beta=0.3,DoubleExponentialSmoothing predictions on Test Set")
plt.plot(TES_test["predict", 0.3, 0.3, 0.8], label="Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing predictions on Test Set")
plt.legend(loc="best")
plt.grid();
plt.title("Plot of Exponential Smoothing Predictions and the Acutal Values");
plt.show()
# 'smoothing_level': 0.09942398570261507,
# 'smoothing_trend': 3.5015167744954147e-06,
# 'smoothing_seasonal': 0.0003177759456525393,
fullmodel1 = ExponentialSmoothing(rdf,trend="additive",seasonal="multiplicative")
fullmodel1= fullmodel1.fit(smoothing_level=0.09942398570261507, smoothing_trend=3.5015167744954147e-06, smoothing_seasonal=0.0003177759456525393)
RMSE_fullmodel1 = rmse(rdf["Rose"], fullmodel1.fittedvalues)
display("Full model RMSE", RMSE_fullmodel1)
prediction_1 = fullmodel1.forecast(steps=len(test))
rdf.plot(legend=True, label="Actual")
prediction_1.plot(legend=True, label="Forecast")
plt.show()
'Full model RMSE'
16.098963863336504
pred_1_df = pd.DataFrame({"lower_CI":prediction_1 - 1.96*fullmodel1.resid.std(),
"prediction":prediction_1,
"upper_ci": prediction_1 + 1.96*fullmodel1.resid.std()})
display(pred_1_df.head().style)
axis = rdf.plot(label="Actual", figsize=(15,8))
pred_1_df["prediction"].plot(ax=axis, label="Forecast", alpha=1) # alpha here is for transparency of the prediction line
axis.fill_between(pred_1_df.index, pred_1_df["lower_CI"], pred_1_df["upper_ci"], color="green", alpha=.15) # alpha here denotes the transparency of the shaded region
axis.set_xlabel("Year-Months")
axis.set_ylabel("Rose")
plt.legend(loc="best")
plt.grid()
plt.show();
| lower_CI | prediction | upper_ci | |
|---|---|---|---|
| 1995-08-31 00:00:00 | 15.535791 | 47.174468 | 78.813145 |
| 1995-09-30 00:00:00 | 12.342879 | 43.981556 | 75.620233 |
| 1995-10-31 00:00:00 | 11.223246 | 42.861923 | 74.500599 |
| 1995-11-30 00:00:00 | 17.440160 | 49.078837 | 80.717514 |
| 1995-12-31 00:00:00 | 35.069937 | 66.708614 | 98.347291 |
from statsmodels.tsa.stattools import adfuller
display("Results of Dickey-Fuller Test:")
dftest = adfuller(rdf["Rose"])
dfoutput = pd.Series(dftest[0:4], index=["Test Statistic","p-value","#Lags Used","Number of Observations Used"])
for key,value in dftest[4].items():
dfoutput["Critical Value (%s)"%key] = value
display(dfoutput)
display("P Value: ", dftest[1], "H0 rejected and the time series is stationary")
'Results of Dickey-Fuller Test:'
Test Statistic -1.876699 p-value 0.343101 #Lags Used 13.000000 Number of Observations Used 173.000000 Critical Value (1%) -3.468726 Critical Value (5%) -2.878396 Critical Value (10%) -2.575756 dtype: float64
'P Value: '
0.34310071428337663
'H0 rejected and the time series is stationary'
import itertools # library for generating all possible combinations of given number sets
from statsmodels.tsa.arima_model import ARIMA
p = q = range(0, 4)
d= range(1,2) # required as itertools product function expects the parameters as range objects, even if it is only value
pdq = list(itertools.product(p, d, q))
# Creating an empty Dataframe with column names only
ARIMA_AIC = pd.DataFrame(columns=["Param", "AIC"])
ARIMA_AIC
for param in pdq:
ARIMA_model = ARIMA(train["Rose"], order=param).fit()
display(f"ARIMA{param} - AIC:{ARIMA_model.aic}")
ARIMA_AIC=ARIMA_AIC.append({"Param":param, "AIC": ARIMA_model.aic}, ignore_index=True)
display(ARIMA_AIC.sort_values(by=["AIC"],ascending=True))
'ARIMA(0, 1, 0) - AIC:1335.1526583086775'
'ARIMA(0, 1, 1) - AIC:1280.7261830464295'
'ARIMA(0, 1, 2) - AIC:1276.8353724265137'
'ARIMA(0, 1, 3) - AIC:1278.074261468577'
'ARIMA(1, 1, 0) - AIC:1319.3483105801872'
'ARIMA(1, 1, 1) - AIC:1277.7757508081509'
'ARIMA(1, 1, 2) - AIC:1277.3592218920899'
'ARIMA(1, 1, 3) - AIC:1279.3126413359505'
'ARIMA(2, 1, 0) - AIC:1300.6092611743966'
'ARIMA(2, 1, 1) - AIC:1279.0456894093159'
'ARIMA(2, 1, 2) - AIC:1279.2986939364937'
'ARIMA(2, 1, 3) - AIC:1281.1962260422388'
'ARIMA(3, 1, 0) - AIC:1299.4787391543114'
'ARIMA(3, 1, 1) - AIC:1279.6059711231028'
'ARIMA(3, 1, 2) - AIC:1280.9692522114606'
'ARIMA(3, 1, 3) - AIC:1273.1940939395067'
| Param | AIC | |
|---|---|---|
| 15 | (3, 1, 3) | 1273.194094 |
| 2 | (0, 1, 2) | 1276.835372 |
| 6 | (1, 1, 2) | 1277.359222 |
| 5 | (1, 1, 1) | 1277.775751 |
| 3 | (0, 1, 3) | 1278.074261 |
| 9 | (2, 1, 1) | 1279.045689 |
| 10 | (2, 1, 2) | 1279.298694 |
| 7 | (1, 1, 3) | 1279.312641 |
| 13 | (3, 1, 1) | 1279.605971 |
| 1 | (0, 1, 1) | 1280.726183 |
| 14 | (3, 1, 2) | 1280.969252 |
| 11 | (2, 1, 3) | 1281.196226 |
| 12 | (3, 1, 0) | 1299.478739 |
| 8 | (2, 1, 0) | 1300.609261 |
| 4 | (1, 1, 0) | 1319.348311 |
| 0 | (0, 1, 0) | 1335.152658 |
auto_ARIMA = ARIMA(train["Rose"], order=(3,1,3),freq='M')
results_auto_ARIMA = auto_ARIMA.fit()
display(results_auto_ARIMA.summary())
| Dep. Variable: | D.Rose | No. Observations: | 131 |
|---|---|---|---|
| Model: | ARIMA(3, 1, 3) | Log Likelihood | -628.597 |
| Method: | css-mle | S.D. of innovations | 28.356 |
| Date: | Sun, 15 Aug 2021 | AIC | 1273.194 |
| Time: | 08:41:30 | BIC | 1296.196 |
| Sample: | 02-29-1980 | HQIC | 1282.541 |
| - 12-31-1990 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| const | -0.4906 | 0.088 | -5.547 | 0.000 | -0.664 | -0.317 |
| ar.L1.D.Rose | -0.7243 | 0.086 | -8.405 | 0.000 | -0.893 | -0.555 |
| ar.L2.D.Rose | -0.7218 | 0.087 | -8.336 | 0.000 | -0.891 | -0.552 |
| ar.L3.D.Rose | 0.2764 | 0.086 | 3.232 | 0.001 | 0.109 | 0.444 |
| ma.L1.D.Rose | -0.0151 | 0.045 | -0.339 | 0.734 | -0.102 | 0.072 |
| ma.L2.D.Rose | 0.0151 | 0.044 | 0.341 | 0.733 | -0.072 | 0.102 |
| ma.L3.D.Rose | -1.0000 | 0.046 | -21.894 | 0.000 | -1.090 | -0.910 |
| Real | Imaginary | Modulus | Frequency | |
|---|---|---|---|---|
| AR.1 | -0.5011 | -0.8661j | 1.0006 | -0.3335 |
| AR.2 | -0.5011 | +0.8661j | 1.0006 | 0.3335 |
| AR.3 | 3.6138 | -0.0000j | 3.6138 | -0.0000 |
| MA.1 | 1.0000 | -0.0000j | 1.0000 | -0.0000 |
| MA.2 | -0.4924 | -0.8703j | 1.0000 | -0.3319 |
| MA.3 | -0.4924 | +0.8703j | 1.0000 | 0.3319 |
predicted_auto_ARIMA = results_auto_ARIMA.forecast(steps=len(test))
RMSE_autoarima = rmse(test["Rose"],predicted_auto_ARIMA[0])
display(RMSE_autoarima)
resultsDf_arima = pd.DataFrame({'Test RMSE': [RMSE_autoarima]}
,index=['ARIMA(3,1,3)'])
resultsDf = pd.concat([resultsDf, resultsDf_arima])
display(resultsDf)
15.987025695134827
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| ARIMA(3,1,3) | 15.987026 |
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
plot_acf(rdf["Rose"].diff().dropna(),lags=50,title='Differenced Data Autocorrelation');
plot_pacf(rdf["Rose"].diff().dropna(),lags=50,title='Differenced Data Partial Autocorrelation');
plt.show()
display("p value from PACF: 4 & q value from ACF: 2")
manual_ARIMA = ARIMA(train["Rose"].astype("float64"), order=(4,1,2),freq="M")
results_manual_ARIMA = manual_ARIMA.fit()
display(results_manual_ARIMA.summary())
predicted_manual_ARIMA = results_manual_ARIMA.forecast(steps=len(test))
RMSE_manualarima = rmse(test["Rose"],
predicted_manual_ARIMA[0])
resultsDf_manual_arima = pd.DataFrame({'Test RMSE': [RMSE_manualarima]}
,index=['Manual ARIMA(4,1,2)'])
resultsDf = pd.concat([resultsDf, resultsDf_manual_arima])
display(resultsDf)
'p value from PACF: 4 & q value from ACF: 2'
| Dep. Variable: | D.Rose | No. Observations: | 131 |
|---|---|---|---|
| Model: | ARIMA(4, 1, 2) | Log Likelihood | -633.876 |
| Method: | css-mle | S.D. of innovations | 29.793 |
| Date: | Sun, 15 Aug 2021 | AIC | 1283.753 |
| Time: | 08:41:31 | BIC | 1306.754 |
| Sample: | 02-29-1980 | HQIC | 1293.099 |
| - 12-31-1990 |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| const | -0.1905 | 0.576 | -0.331 | 0.741 | -1.319 | 0.938 |
| ar.L1.D.Rose | 1.1685 | 0.087 | 13.391 | 0.000 | 0.997 | 1.340 |
| ar.L2.D.Rose | -0.3562 | 0.132 | -2.693 | 0.007 | -0.616 | -0.097 |
| ar.L3.D.Rose | 0.1855 | 0.132 | 1.402 | 0.161 | -0.074 | 0.445 |
| ar.L4.D.Rose | -0.2227 | 0.091 | -2.443 | 0.015 | -0.401 | -0.044 |
| ma.L1.D.Rose | -1.9506 | nan | nan | nan | nan | nan |
| ma.L2.D.Rose | 1.0000 | nan | nan | nan | nan | nan |
| Real | Imaginary | Modulus | Frequency | |
|---|---|---|---|---|
| AR.1 | 1.1027 | -0.4116j | 1.1770 | -0.0569 |
| AR.2 | 1.1027 | +0.4116j | 1.1770 | 0.0569 |
| AR.3 | -0.6863 | -1.6643j | 1.8003 | -0.3122 |
| AR.4 | -0.6863 | +1.6643j | 1.8003 | 0.3122 |
| MA.1 | 0.9753 | -0.2209j | 1.0000 | -0.0355 |
| MA.2 | 0.9753 | +0.2209j | 1.0000 | 0.0355 |
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| ARIMA(3,1,3) | 15.987026 |
| Manual ARIMA(4,1,2) | 33.949388 |
plot_acf(rdf["Rose"].diff().dropna(),lags=50,title="Differenced Data Autocorrelation");
plt.show()
display("Seasonality is observed for 6 and 12")
'Seasonality is observed for 6 and 12'
from statsmodels.tsa.statespace.sarimax import SARIMAX
p = q = range(0, 3)
d= range(1,2)
D = range(0,1)
pdq = list(itertools.product(p, d, q))
model_pdq = [(x[0], x[1], x[2], 6) for x in list(itertools.product(p, D, q))] # seasonal PDQ
SARIMA_AIC = pd.DataFrame(columns=['param','seasonal', 'AIC'])
for param in pdq:
for param_seasonal in model_pdq:
SARIMA_model = SARIMAX(train["Rose"],order=param,
seasonal_order = param_seasonal,enforce_stationarity=False,
enforce_invertibility=False)
results_SARIMA = SARIMA_model.fit(maxiter=1000)
SARIMA_AIC = SARIMA_AIC.append({"param":param,
"seasonal":param_seasonal,
"AIC": results_SARIMA.aic},
ignore_index=True)
SARIMA_AIC.sort_values(by=['AIC']).head()
| param | seasonal | AIC | |
|---|---|---|---|
| 53 | (1, 1, 2) | (2, 0, 2, 6) | 1041.655818 |
| 26 | (0, 1, 2) | (2, 0, 2, 6) | 1043.600261 |
| 80 | (2, 1, 2) | (2, 0, 2, 6) | 1045.220454 |
| 71 | (2, 1, 1) | (2, 0, 2, 6) | 1051.673461 |
| 44 | (1, 1, 1) | (2, 0, 2, 6) | 1052.778470 |
auto_SARIMA_6 = SARIMAX(train["Rose"].values,
order=(0,1,2),
seasonal_order=(2,0,2,6),
enforce_stationarity=False,
enforce_ivertibility=False)
results_auto_SARIMA_6 = auto_SARIMA_6.fit(maxiter=1000)
display(results_auto_SARIMA_6.summary())
| Dep. Variable: | y | No. Observations: | 132 |
|---|---|---|---|
| Model: | SARIMAX(0, 1, 2)x(2, 0, 2, 6) | Log Likelihood | -514.804 |
| Date: | Sun, 15 Aug 2021 | AIC | 1043.608 |
| Time: | 08:42:16 | BIC | 1062.884 |
| Sample: | 0 | HQIC | 1051.433 |
| - 132 | |||
| Covariance Type: | opg |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ma.L1 | -0.7855 | 1.121 | -0.701 | 0.483 | -2.982 | 1.411 |
| ma.L2 | -0.2113 | 0.286 | -0.739 | 0.460 | -0.772 | 0.349 |
| ar.S.L6 | -0.0727 | 0.037 | -1.972 | 0.049 | -0.145 | -0.000 |
| ar.S.L12 | 0.8368 | 0.042 | 19.859 | 0.000 | 0.754 | 0.919 |
| ma.S.L6 | 0.2230 | 13.328 | 0.017 | 0.987 | -25.899 | 26.345 |
| ma.S.L12 | -0.7757 | 10.376 | -0.075 | 0.940 | -21.112 | 19.560 |
| sigma2 | 348.6267 | 4636.610 | 0.075 | 0.940 | -8738.962 | 9436.216 |
| Ljung-Box (L1) (Q): | 0.14 | Jarque-Bera (JB): | 90.79 |
|---|---|---|---|
| Prob(Q): | 0.70 | Prob(JB): | 0.00 |
| Heteroskedasticity (H): | 0.42 | Skew: | 0.37 |
| Prob(H) (two-sided): | 0.01 | Kurtosis: | 7.27 |
predicted_auto_SARIMA_6 = results_auto_SARIMA_6.get_forecast(steps=len(test))
display(predicted_auto_SARIMA_6.summary_frame(alpha=0.05).head())
rmse_autosarima6 = rmse(test["Rose"], predicted_auto_SARIMA_6.predicted_mean)
display(rmse_autosarima6)
temp_resultsDf = pd.DataFrame({"Test RMSE" : [rmse_autosarima6]}, index = ["SARIMA(0,1,2)(2,0,2,6)"])
resultsDf = pd.concat([resultsDf, temp_resultsDf])
display(resultsDf)
| y | mean | mean_se | mean_ci_lower | mean_ci_upper |
|---|---|---|---|---|
| 0 | 69.069584 | 19.191843 | 31.454262 | 106.684905 |
| 1 | 67.815724 | 19.664604 | 29.273809 | 106.357639 |
| 2 | 76.132646 | 19.655994 | 37.607607 | 114.657686 |
| 3 | 71.774607 | 19.656086 | 33.249387 | 110.299826 |
| 4 | 76.560597 | 19.656179 | 38.035194 | 115.086000 |
27.366827145873856
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| ARIMA(3,1,3) | 15.987026 |
| Manual ARIMA(4,1,2) | 33.949388 |
| SARIMA(0,1,2)(2,0,2,6) | 27.366827 |
p = q = range(0, 3)
d= range(1,2)
D = range(0,1)
pdq = list(itertools.product(p, d, q))
model_pdq = [(x[0], x[1], x[2], 12) for x in list(itertools.product(p, D, q))]
SARIMA_AIC = pd.DataFrame(columns=['param','seasonal', 'AIC'])
for param in pdq:
for param_seasonal in model_pdq:
SARIMA_model =SARIMAX(train["Rose"],
order=param,
seasonal_order = param_seasonal,
enforce_stationarity=False,
enforce_invertibility=False)
results_SARIMA = SARIMA_model.fit(maxiter=1000)
SARIMA_AIC = SARIMA_AIC.append({"param":param,
"seasonal":param_seasonal,
"AIC": results_SARIMA.aic},
ignore_index=True)
SARIMA_AIC.sort_values(by=["AIC"]).head()
| param | seasonal | AIC | |
|---|---|---|---|
| 26 | (0, 1, 2) | (2, 0, 2, 12) | 887.937509 |
| 53 | (1, 1, 2) | (2, 0, 2, 12) | 889.871767 |
| 80 | (2, 1, 2) | (2, 0, 2, 12) | 890.668799 |
| 69 | (2, 1, 1) | (2, 0, 0, 12) | 896.518161 |
| 78 | (2, 1, 2) | (2, 0, 0, 12) | 897.346444 |
auto_SARIMA_12 =SARIMAX(train["Rose"],
order=(0, 1, 2),
seasonal_order=(2, 0, 2, 12),
enforce_stationarity=False,
enforce_invertibility=False)
results_auto_SARIMA_12 = auto_SARIMA_12.fit(maxiter=1000)
display(results_auto_SARIMA_12.summary())
| Dep. Variable: | Rose | No. Observations: | 132 |
|---|---|---|---|
| Model: | SARIMAX(0, 1, 2)x(2, 0, 2, 12) | Log Likelihood | -436.969 |
| Date: | Sun, 15 Aug 2021 | AIC | 887.938 |
| Time: | 08:43:45 | BIC | 906.448 |
| Sample: | 01-31-1980 | HQIC | 895.437 |
| - 12-31-1990 | |||
| Covariance Type: | opg |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ma.L1 | -0.8427 | 189.844 | -0.004 | 0.996 | -372.930 | 371.245 |
| ma.L2 | -0.1573 | 29.826 | -0.005 | 0.996 | -58.614 | 58.300 |
| ar.S.L12 | 0.3467 | 0.079 | 4.375 | 0.000 | 0.191 | 0.502 |
| ar.S.L24 | 0.3023 | 0.076 | 3.996 | 0.000 | 0.154 | 0.451 |
| ma.S.L12 | 0.0767 | 0.133 | 0.577 | 0.564 | -0.184 | 0.337 |
| ma.S.L24 | -0.0726 | 0.146 | -0.498 | 0.618 | -0.358 | 0.213 |
| sigma2 | 251.3137 | 4.77e+04 | 0.005 | 0.996 | -9.33e+04 | 9.38e+04 |
| Ljung-Box (L1) (Q): | 0.10 | Jarque-Bera (JB): | 2.33 |
|---|---|---|---|
| Prob(Q): | 0.75 | Prob(JB): | 0.31 |
| Heteroskedasticity (H): | 0.88 | Skew: | 0.37 |
| Prob(H) (two-sided): | 0.70 | Kurtosis: | 3.03 |
predicted_auto_SARIMA_12 = results_auto_SARIMA_12.get_forecast(steps=len(test))
display(predicted_auto_SARIMA_12.summary_frame(alpha=0.05).head())
rmse_autosarima12 = rmse(test["Rose"], predicted_auto_SARIMA_12.predicted_mean)
display("RMSE", rmse_autosarima12)
temp_resultsDf = pd.DataFrame({"Test RMSE": [rmse_autosarima12]}, index=['SARIMA(0,1,2)(2,0,2,12)'])
resultsDf = pd.concat([resultsDf, temp_resultsDf])
display(resultsDf)
| Rose | mean | mean_se | mean_ci_lower | mean_ci_upper |
|---|---|---|---|---|
| 1991-01-31 | 62.867264 | 15.928501 | 31.647976 | 94.086552 |
| 1991-02-28 | 70.541190 | 16.147659 | 38.892360 | 102.190020 |
| 1991-03-31 | 77.356411 | 16.147656 | 45.707586 | 109.005236 |
| 1991-04-30 | 76.208814 | 16.147656 | 44.559989 | 107.857639 |
| 1991-05-31 | 72.747398 | 16.147656 | 41.098573 | 104.396223 |
'RMSE'
26.928361755113336
| Test RMSE | |
|---|---|
| RegressionOnTime | 15.268955 |
| NaiveModel | 79.718773 |
| SimpleAverageModel | 53.460570 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| ARIMA(3,1,3) | 15.987026 |
| Manual ARIMA(4,1,2) | 33.949388 |
| SARIMA(0,1,2)(2,0,2,6) | 27.366827 |
| SARIMA(0,1,2)(2,0,2,12) | 26.928362 |
plot_acf(rdf["Rose"].diff().dropna(), lags=50, title="Differenced Data Autocorrelation")
plot_pacf(rdf["Rose"].diff().dropna(), lags=50, title="Differenced Data Partial Autocorrelation")
plt.show()
rdf.plot()
plt.grid()
plt.show()
(rdf["Rose"].diff(6)).plot()
plt.grid()
plt.show()
We see that there might be a slight trend which can be noticed in the data. So we take a differencing of first order on the seasonally differenced series.
(rdf["Rose"].diff(6)).diff().plot()
plt.grid()
plt.show()
pValue = adfuller(train["Rose"])[1]
print("P value: ",pValue, " is greater than 0.05, there by H0 - Series in Not-Stationary is rejected")
P value: 0.21947564129072383 is greater than 0.05, there by H0 - Series in Not-Stationary is rejected
check the stationarity of the above series before fitting the SARIMA model.
# adfuller((train["Rose"].diff(6).dropna()).diff(1).dropna())[1]
Checking the ACF and the PACF plots for the new modified Time Series.
# plot_acf((rdf["Rose"].diff(6).dropna()).diff(1).dropna(), lags=30)
# plot_pacf((rdf["Rose"].diff(6).dropna()).diff(1).dropna(), lags=30)
# plt.show()
plot_acf(rdf["Rose"].diff().dropna(), lags=30)
plot_pacf(rdf["Rose"].diff().dropna(), lags=30)
plt.show()
Here, we have taken alpha=0.05.
We are going to take the seasonal period as 6. We will keep the p(0) and q(0) parameters same as the ARIMA model.
The Auto-Regressive parameter in an SARIMA model is 'P' which comes from the significant lag after which the PACF plot cuts-off to 2. The Moving-Average parameter in an SARIMA model is 'Q' which comes from the significant lag after which the ACF plot cuts-off to 2.
manual_SARIMA_6 = SARIMAX(train["Rose"],
order=(4,1,2),
seasonal_order=(4,1,2,6),
enforce_stationarity=False,
enforce_invertibility=False)
results_manual_SARIMA_6 = manual_SARIMA_6.fit(maxiter=1000)
display(results_manual_SARIMA_6.summary())
| Dep. Variable: | Rose | No. Observations: | 132 |
|---|---|---|---|
| Model: | SARIMAX(4, 1, 2)x(4, 1, 2, 6) | Log Likelihood | -414.552 |
| Date: | Sun, 15 Aug 2021 | AIC | 855.105 |
| Time: | 08:44:21 | BIC | 888.576 |
| Sample: | 01-31-1980 | HQIC | 868.639 |
| - 12-31-1990 | |||
| Covariance Type: | opg |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ar.L1 | -0.5124 | 0.415 | -1.235 | 0.217 | -1.326 | 0.301 |
| ar.L2 | -0.4283 | 0.251 | -1.709 | 0.087 | -0.919 | 0.063 |
| ar.L3 | -0.4016 | 0.174 | -2.306 | 0.021 | -0.743 | -0.060 |
| ar.L4 | -0.2521 | 0.159 | -1.585 | 0.113 | -0.564 | 0.060 |
| ma.L1 | 92.5149 | 6.414 | 14.424 | 0.000 | 79.944 | 105.086 |
| ma.L2 | -14.9539 | 41.773 | -0.358 | 0.720 | -96.828 | 66.921 |
| ar.S.L6 | -0.8359 | 0.597 | -1.400 | 0.161 | -2.006 | 0.334 |
| ar.S.L12 | -0.2682 | 0.627 | -0.428 | 0.669 | -1.497 | 0.960 |
| ar.S.L18 | -0.1801 | 0.292 | -0.616 | 0.538 | -0.753 | 0.393 |
| ar.S.L24 | 0.0807 | 0.183 | 0.442 | 0.658 | -0.277 | 0.438 |
| ma.S.L6 | -8.7342 | 15.713 | -0.556 | 0.578 | -39.530 | 22.062 |
| ma.S.L12 | 29.5035 | 6.005 | 4.913 | 0.000 | 17.733 | 41.274 |
| sigma2 | 4.015e-05 | 1.52e-05 | 2.641 | 0.008 | 1.03e-05 | 7e-05 |
| Ljung-Box (L1) (Q): | 0.09 | Jarque-Bera (JB): | 3.18 |
|---|---|---|---|
| Prob(Q): | 0.77 | Prob(JB): | 0.20 |
| Heteroskedasticity (H): | 0.73 | Skew: | 0.37 |
| Prob(H) (two-sided): | 0.37 | Kurtosis: | 3.50 |
manual_SARIMA_12 = SARIMAX(train["Rose"],
order=(4,1,2),
seasonal_order=(4,1,2,12),
enforce_stationarity=False,
enforce_invertibility=False)
results_manual_SARIMA_12 = manual_SARIMA_12.fit(maxiter=1000)
display(results_manual_SARIMA_12.summary())
| Dep. Variable: | Rose | No. Observations: | 132 |
|---|---|---|---|
| Model: | SARIMAX(4, 1, 2)x(4, 1, 2, 12) | Log Likelihood | -277.661 |
| Date: | Sun, 15 Aug 2021 | AIC | 581.322 |
| Time: | 08:44:42 | BIC | 609.983 |
| Sample: | 01-31-1980 | HQIC | 592.663 |
| - 12-31-1990 | |||
| Covariance Type: | opg |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ar.L1 | -0.9743 | 0.199 | -4.900 | 0.000 | -1.364 | -0.585 |
| ar.L2 | -0.1123 | 0.285 | -0.394 | 0.693 | -0.670 | 0.446 |
| ar.L3 | -0.1044 | 0.277 | -0.377 | 0.706 | -0.647 | 0.438 |
| ar.L4 | -0.1285 | 0.162 | -0.794 | 0.427 | -0.446 | 0.189 |
| ma.L1 | 0.1605 | 174.275 | 0.001 | 0.999 | -341.413 | 341.734 |
| ma.L2 | -0.8395 | 146.327 | -0.006 | 0.995 | -287.634 | 285.955 |
| ar.S.L12 | -0.1443 | 0.364 | -0.396 | 0.692 | -0.858 | 0.569 |
| ar.S.L24 | -0.3596 | 0.227 | -1.587 | 0.113 | -0.804 | 0.085 |
| ar.S.L36 | -0.2153 | 0.106 | -2.040 | 0.041 | -0.422 | -0.008 |
| ar.S.L48 | -0.1195 | 0.093 | -1.281 | 0.200 | -0.302 | 0.063 |
| ma.S.L12 | -0.5157 | 0.343 | -1.502 | 0.133 | -1.189 | 0.157 |
| ma.S.L24 | 0.2084 | 0.373 | 0.558 | 0.577 | -0.523 | 0.940 |
| sigma2 | 215.3499 | 3.75e+04 | 0.006 | 0.995 | -7.34e+04 | 7.38e+04 |
| Ljung-Box (L1) (Q): | 0.03 | Jarque-Bera (JB): | 2.41 |
|---|---|---|---|
| Prob(Q): | 0.86 | Prob(JB): | 0.30 |
| Heteroskedasticity (H): | 0.49 | Skew: | 0.32 |
| Prob(H) (two-sided): | 0.10 | Kurtosis: | 3.68 |
predicted_manual_SARIMA_6 = results_manual_SARIMA_6.get_forecast(steps=len(test))
display(predicted_manual_SARIMA_6.summary_frame(alpha=0.05).head())
rmse_manualsarima6 = rmse(test["Rose"], predicted_manual_SARIMA_6.predicted_mean)
display(rmse_manualsarima6)
temp_resultsDf = pd.DataFrame({"Test RMSE": [rmse_manualsarima6]}, index=["SARIMA(4,1,2)(4,1,2,6)"])
resultsDf = pd.concat([resultsDf, temp_resultsDf])
predicted_manual_SARIMA_12 = results_manual_SARIMA_12.get_forecast(steps=len(test))
display(predicted_manual_SARIMA_12.summary_frame(alpha=0.05).head())
rmse_manualsarima12 = rmse(test["Rose"], predicted_manual_SARIMA_12.predicted_mean)
display(rmse_manualsarima12)
temp_resultsDf = pd.DataFrame({"Test RMSE": [rmse_manualsarima12]}, index=["SARIMA(4,1,2)(4,1,2,12)"])
resultsDf = pd.concat([resultsDf, temp_resultsDf])
| Rose | mean | mean_se | mean_ci_lower | mean_ci_upper |
|---|---|---|---|---|
| 1991-01-31 | 55.125757 | 17.326517 | 21.166407 | 89.085106 |
| 1991-02-28 | 58.436911 | 18.284214 | 22.600510 | 94.273311 |
| 1991-03-31 | 69.979504 | 18.777280 | 33.176711 | 106.782297 |
| 1991-04-30 | 76.484663 | 19.021315 | 39.203570 | 113.765756 |
| 1991-05-31 | 71.735775 | 19.566637 | 33.385872 | 110.085678 |
19.6976473343888
| Rose | mean | mean_se | mean_ci_lower | mean_ci_upper |
|---|---|---|---|---|
| 1991-01-31 | 46.384502 | 14.770602 | 17.434654 | 75.334349 |
| 1991-02-28 | 62.932609 | 14.989645 | 33.553445 | 92.311772 |
| 1991-03-31 | 63.527680 | 14.999316 | 34.129561 | 92.925800 |
| 1991-04-30 | 66.472991 | 15.179512 | 36.721694 | 96.224288 |
| 1991-05-31 | 63.540504 | 15.180389 | 33.787488 | 93.293520 |
17.528551143538984
display(resultsDf.sort_values(by=["Test RMSE"],ascending=True))
| Test RMSE | |
|---|---|
| Alpha=0.3,Beta=0.3,Gamma=0.8,TripleExponentialSmoothing | 10.169599 |
| 2_point_trailing_Moving_Average | 11.529278 |
| 4_point_trailing_Moving_Average | 14.451403 |
| 6_point_trailing_Moving_Average | 14.566327 |
| 9_point_trailing_Moving_Average | 14.727630 |
| RegressionOnTime | 15.268955 |
| ARIMA(3,1,3) | 15.987026 |
| SARIMA(4,1,2)(4,1,2,12) | 17.528551 |
| SARIMA(4,1,2)(4,1,2,6) | 19.697647 |
| Alpha=0.0994,Beta=3.501,Gamma=0.0003,TripleExponentialSmoothing | 21.224438 |
| SARIMA(0,1,2)(2,0,2,12) | 26.928362 |
| SARIMA(0,1,2)(2,0,2,6) | 27.366827 |
| Manual ARIMA(4,1,2) | 33.949388 |
| Alpha=0.102,SimpleExponentialSmoothing | 36.796242 |
| Alpha=0.9,SimpleExponentialSmoothing | 47.504821 |
| SimpleAverageModel | 53.460570 |
| NaiveModel | 79.718773 |
| Alpha=0.3,Beta=0.4,DoubleExponentialSmoothing | 265.567594 |
res_df = pd.DataFrame({'columns': resultsDf.index, 'Test RMSE': resultsDf["Test RMSE"]})
sorted_resDf_values = res_df.sort_values('Test RMSE', ascending=True)
plt.figure(figsize=(10,10))
sns.barplot(x='Test RMSE', y='columns', data=sorted_resDf_values)
plt.xlabel('Test RMSE for all models')
plt.ylabel('Model')
plt.title('Best Models')
plt.show()
full_data_model = SARIMAX(rdf["Rose"], order=(4,1,2), seasonal_order=(4,1,2,12),
enforce_stationarity=False,
enforce_invertibility=False)
results_full_data_model = full_data_model.fit(maxiter=1000)
display(results_full_data_model.summary())
| Dep. Variable: | Rose | No. Observations: | 187 |
|---|---|---|---|
| Model: | SARIMAX(4, 1, 2)x(4, 1, 2, 12) | Log Likelihood | -484.502 |
| Date: | Sun, 15 Aug 2021 | AIC | 995.005 |
| Time: | 08:45:22 | BIC | 1031.457 |
| Sample: | 01-31-1980 | HQIC | 1009.810 |
| - 07-31-1995 | |||
| Covariance Type: | opg |
| coef | std err | z | P>|z| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| ar.L1 | -0.9670 | 0.129 | -7.502 | 0.000 | -1.220 | -0.714 |
| ar.L2 | -0.0276 | 0.185 | -0.149 | 0.881 | -0.390 | 0.335 |
| ar.L3 | 0.0183 | 0.153 | 0.119 | 0.905 | -0.282 | 0.319 |
| ar.L4 | -0.0203 | 0.091 | -0.223 | 0.823 | -0.199 | 0.158 |
| ma.L1 | 0.1385 | 5.897 | 0.023 | 0.981 | -11.420 | 11.697 |
| ma.L2 | -0.8618 | 5.093 | -0.169 | 0.866 | -10.844 | 9.121 |
| ar.S.L12 | -0.6689 | 0.187 | -3.585 | 0.000 | -1.035 | -0.303 |
| ar.S.L24 | -0.1390 | 0.169 | -0.822 | 0.411 | -0.470 | 0.192 |
| ar.S.L36 | -0.1890 | 0.081 | -2.333 | 0.020 | -0.348 | -0.030 |
| ar.S.L48 | -0.1751 | 0.045 | -3.851 | 0.000 | -0.264 | -0.086 |
| ma.S.L12 | 0.1233 | 0.217 | 0.567 | 0.571 | -0.303 | 0.550 |
| ma.S.L24 | -0.3107 | 0.187 | -1.664 | 0.096 | -0.677 | 0.055 |
| sigma2 | 156.8840 | 932.480 | 0.168 | 0.866 | -1670.743 | 1984.511 |
| Ljung-Box (L1) (Q): | 0.01 | Jarque-Bera (JB): | 7.86 |
|---|---|---|---|
| Prob(Q): | 0.92 | Prob(JB): | 0.02 |
| Heteroskedasticity (H): | 0.20 | Skew: | 0.26 |
| Prob(H) (two-sided): | 0.00 | Kurtosis: | 4.13 |
predicted_manual_SARIMA_12_full_data = results_full_data_model.get_forecast(steps=12)
display(predicted_manual_SARIMA_12_full_data.summary_frame(alpha=0.05).head())
rmse_full_data = rmse(rdf["Rose"], results_full_data_model.fittedvalues)
display(rmse_full_data)
| Rose | mean | mean_se | mean_ci_lower | mean_ci_upper |
|---|---|---|---|---|
| 1995-08-31 | 44.225709 | 12.581601 | 19.566225 | 68.885194 |
| 1995-09-30 | 45.910636 | 12.746342 | 20.928266 | 70.893007 |
| 1995-10-31 | 47.574684 | 12.796922 | 22.493179 | 72.656190 |
| 1995-11-30 | 59.549598 | 13.045578 | 33.980736 | 85.118460 |
| 1995-12-31 | 86.410149 | 13.067677 | 60.797974 | 112.022324 |
34.85335076902319
pred_full_manual_SARIMA_data = predicted_manual_SARIMA_12_full_data.summary_frame(alpha=0.05).set_index(pd.date_range(start="1995-08-31", end="1996-07-31", freq="M"))
axis = rdf["Rose"].plot(label="Observed")
pred_full_manual_SARIMA_data["mean"].plot(ax=axis, label="Forecast", alpha=0.7)
axis.fill_between(pred_full_manual_SARIMA_data.index,
pred_full_manual_SARIMA_data["mean_ci_lower"],
pred_full_manual_SARIMA_data["mean_ci_upper"],
color="green",
alpha=0.15)
axis.set_xlabel("Year-Months")
axis.set_ylabel("Rose")
plt.title("Prediction for future 12 months")
plt.legend(loc="best")
plt.show()